{
	"./rwkv-x-dev/1_0-c1-290_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6341600901916573,
						"acc_norm,none": 0.6265501691093573,
						"acc_norm_stderr,none": 0.09033171989254664,
						"acc_stderr,none": 0.10941779378117691,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34375,
						"acc_stderr,none": 0.014756066131813472,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8159402985074626,
						"acc_stderr,none": 0.17302192278297318,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2696425487825937,
						"acc_norm,none": 0.2696425487825937,
						"acc_norm_stderr,none": 0.04118720900076286,
						"acc_stderr,none": 0.04118720900076286,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5059552167698904,
						"acc_stderr,none": 0.009090808207026093,
						"alias": "glue",
						"f1,none": 0.631020281787408,
						"f1_stderr,none": 0.000349179192045029,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.017205836842963076,
						"alias": "lambada",
						"perplexity,none": 3.8486899641486945,
						"perplexity_stderr,none": 0.24564821402052592
					},
					"lambada_multilingual": {
						"acc,none": 0.5304094702115273,
						"acc_stderr,none": 0.08721800425120818,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.172057696255198,
						"perplexity_stderr,none": 8.836768749113485
					},
					"mmlu": {
						"acc,none": 0.3045150263495229,
						"acc_stderr,none": 0.04953555240610539,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.28969181721572795,
						"acc_stderr,none": 0.03824850282109334,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3392339877695526,
						"acc_stderr,none": 0.04288143285779722,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3149171270718232,
						"acc_stderr,none": 0.048141873697114604,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28227085315572475,
						"acc_stderr,none": 0.05990992504262538,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48221428571428565,
						"acc_stderr,none": 0.057434463371788064,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7232217829104947,
						"acc_norm,none": 0.6303705111861454,
						"acc_norm_stderr,none": 0.010208178284621705,
						"acc_stderr,none": 0.1602628911159029,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348094282831127,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.55273262940533,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3839058841861647,
						"perplexity_stderr,none": 0.06685814984543348,
						"word_perplexity,none": 10.516713929892898,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.30634483522785527,
						"acc_stderr,none": 0.0013926130044284689,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487361016,
						"bleu_diff,none": -9.386974915039055,
						"bleu_diff_stderr,none": 0.866556280635364,
						"bleu_max,none": 25.948669712165213,
						"bleu_max_stderr,none": 0.7963384311288975,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476196,
						"rouge1_diff,none": -11.811086736186299,
						"rouge1_diff_stderr,none": 0.9239436174679174,
						"rouge1_max,none": 50.80703114109932,
						"rouge1_max_stderr,none": 0.8861622869607654,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -14.2006556317876,
						"rouge2_diff_stderr,none": 1.113872956349568,
						"rouge2_max,none": 34.45515931973113,
						"rouge2_max_stderr,none": 1.0274050524758227,
						"rougeL_acc,none": 0.2594859241126071,
						"rougeL_acc_stderr,none": 0.015345409485557973,
						"rougeL_diff,none": -12.19469458488383,
						"rougeL_diff_stderr,none": 0.9305925303401709,
						"rougeL_max,none": 47.90848096362526,
						"rougeL_max_stderr,none": 0.9049994754821786
					},
					"xcopa": {
						"acc,none": 0.6187272727272727,
						"acc_stderr,none": 0.07173745845333644,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43488621151271756,
						"acc_stderr,none": 0.04991122649696771,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6225858853257926,
						"acc_stderr,none": 0.06282041343261932,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8105192178017532,
						"acc_stderr,none": 0.03747749626431369,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6341600901916573,
						"acc_norm,none": 0.6265501691093573,
						"acc_norm_stderr,none": 0.09033171989254664,
						"acc_stderr,none": 0.10941779378117691,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34375,
						"acc_stderr,none": 0.014756066131813472,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.014976758771620347,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.34833333333333333,
						"acc_stderr,none": 0.013759437498874068,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40273037542662116,
						"acc_norm,none": 0.43600682593856654,
						"acc_norm_stderr,none": 0.014491225699230916,
						"acc_stderr,none": 0.014332236306790144,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7483164983164983,
						"acc_norm,none": 0.7205387205387206,
						"acc_norm_stderr,none": 0.00920783814259724,
						"acc_stderr,none": 0.008905088235948759,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8159402985074626,
						"acc_stderr,none": 0.17302192278297318,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.01188449583454167,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662747,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.01319083007236447,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.015784807891138782,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042088,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632176,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469293,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.00408995448968908,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832351,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140922,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426068,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724453,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333384,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378222,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175124,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113471,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660009,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796398,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499349,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689094,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.323,
						"acc_stderr,none": 0.014794927843348639,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752504,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662165,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257939,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.013912208651021349,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118574,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936685,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697586,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434942,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747415,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.01516792886540756,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620674,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.015120172605483704,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621236,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103315,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702282,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280307,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578234,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139981,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234397,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.01580663942303517,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633706,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.01067487484483796,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178333,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220463,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.015743152379585543,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724423,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817134,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301317,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849874,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175306,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235253,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697594,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656802,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.00556839357508138,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055235,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.014632638658632898,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2696425487825937,
						"acc_norm,none": 0.2696425487825937,
						"acc_norm_stderr,none": 0.04118720900076286,
						"acc_stderr,none": 0.04118720900076286,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.03714454174077367,
						"acc_stderr,none": 0.03714454174077367,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.291866028708134,
						"acc_norm,none": 0.291866028708134,
						"acc_norm_stderr,none": 0.03152229446041968,
						"acc_stderr,none": 0.03152229446041968,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.26717557251908397,
						"acc_norm,none": 0.26717557251908397,
						"acc_norm_stderr,none": 0.038808483010823944,
						"acc_stderr,none": 0.038808483010823944,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03921568627450977,
						"acc_stderr,none": 0.03921568627450977,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566667,
						"acc_stderr,none": 0.04317273776566667,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2693498452012384,
						"acc_norm,none": 0.2693498452012384,
						"acc_norm_stderr,none": 0.024722089230802036,
						"acc_stderr,none": 0.024722089230802036,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.26256983240223464,
						"acc_norm,none": 0.26256983240223464,
						"acc_norm_stderr,none": 0.03298168673967122,
						"acc_stderr,none": 0.03298168673967122,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2616033755274262,
						"acc_norm,none": 0.2616033755274262,
						"acc_norm_stderr,none": 0.028609516716994934,
						"acc_stderr,none": 0.028609516716994934,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604902,
						"acc_stderr,none": 0.04176466758604902,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.02659853762760147,
						"acc_stderr,none": 0.02659853762760147,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832904,
						"acc_stderr,none": 0.031321798030832904,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.29559748427672955,
						"acc_norm,none": 0.29559748427672955,
						"acc_norm_stderr,none": 0.036302143777231344,
						"acc_stderr,none": 0.036302143777231344,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.27607361963190186,
						"acc_norm,none": 0.27607361963190186,
						"acc_norm_stderr,none": 0.03512385283705051,
						"acc_stderr,none": 0.03512385283705051,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761064,
						"acc_stderr,none": 0.03336605189761064,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365907,
						"acc_stderr,none": 0.030954055470365907,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.33613445378151263,
						"acc_norm,none": 0.33613445378151263,
						"acc_norm_stderr,none": 0.030684737115135374,
						"acc_stderr,none": 0.030684737115135374,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21304347826086956,
						"acc_norm,none": 0.21304347826086956,
						"acc_norm_stderr,none": 0.027057754389936187,
						"acc_stderr,none": 0.027057754389936187,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614867,
						"acc_stderr,none": 0.03712537833614867,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.03766763889539851,
						"acc_stderr,none": 0.03766763889539851,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900237,
						"acc_stderr,none": 0.03675137438900237,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.033848364281578606,
						"acc_stderr,none": 0.033848364281578606,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583278,
						"acc_stderr,none": 0.021605737836583278,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.030777434132644294,
						"acc_stderr,none": 0.030777434132644294,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3170731707317073,
						"acc_norm,none": 0.3170731707317073,
						"acc_norm_stderr,none": 0.04212955964853051,
						"acc_stderr,none": 0.04212955964853051,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.041788598786318756,
						"acc_stderr,none": 0.041788598786318756,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.030046599156031487,
						"acc_stderr,none": 0.030046599156031487,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2833333333333333,
						"acc_norm,none": 0.2833333333333333,
						"acc_norm_stderr,none": 0.03368068554116224,
						"acc_stderr,none": 0.03368068554116224,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2751322751322751,
						"acc_norm,none": 0.2751322751322751,
						"acc_norm_stderr,none": 0.032570260086303135,
						"acc_stderr,none": 0.032570260086303135,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135303,
						"acc_stderr,none": 0.03565998174135303,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.04384295586918882,
						"acc_stderr,none": 0.04384295586918882,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.022517032434592285,
						"acc_stderr,none": 0.022517032434592285,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2974137931034483,
						"acc_norm,none": 0.2974137931034483,
						"acc_norm_stderr,none": 0.030076297550592986,
						"acc_stderr,none": 0.030076297550592986,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2988505747126437,
						"acc_norm,none": 0.2988505747126437,
						"acc_norm_stderr,none": 0.034802407456637825,
						"acc_stderr,none": 0.034802407456637825,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2831858407079646,
						"acc_norm,none": 0.2831858407079646,
						"acc_norm_stderr,none": 0.03003639424509229,
						"acc_stderr,none": 0.03003639424509229,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511784,
						"acc_stderr,none": 0.03524390844511784,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.033603007963315286,
						"acc_stderr,none": 0.033603007963315286,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"copa": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.03015113445777634,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5059552167698904,
						"acc_stderr,none": 0.009090808207026093,
						"alias": "glue",
						"f1,none": 0.631020281787408,
						"f1_stderr,none": 0.000349179192045029,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"hellaswag": {
						"acc,none": 0.5236008763194583,
						"acc_norm,none": 0.7071300537741486,
						"acc_norm_stderr,none": 0.00454149215163922,
						"acc_stderr,none": 0.004984219681732656,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.017205836842963076,
						"alias": "lambada",
						"perplexity,none": 3.8486899641486945,
						"perplexity_stderr,none": 0.24564821402052592
					},
					"lambada_multilingual": {
						"acc,none": 0.5304094702115273,
						"acc_stderr,none": 0.08721800425120818,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.172057696255198,
						"perplexity_stderr,none": 8.836768749113485
					},
					"lambada_openai": {
						"acc,none": 0.7411216766931884,
						"acc_stderr,none": 0.006102456247229947,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3839058841861647,
						"perplexity_stderr,none": 0.06685814984543348
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4125751989132544,
						"acc_stderr,none": 0.006858667841807084,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 37.314890209611,
						"perplexity_stderr,none": 2.0842094719063256
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7428682320978071,
						"acc_stderr,none": 0.0060889980371011815,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3844117387889994,
						"perplexity_stderr,none": 0.06689670030178424
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4523578497962352,
						"acc_stderr,none": 0.006934283157219038,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.578440767710752,
						"perplexity_stderr,none": 1.4521331754304034
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5387153114690472,
						"acc_stderr,none": 0.006945063998809929,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.481921405215783,
						"perplexity_stderr,none": 0.848862203187913
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5055307587812925,
						"acc_stderr,none": 0.00696555147549591,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.100624359949446,
						"perplexity_stderr,none": 1.2292885816786672
					},
					"lambada_standard": {
						"acc,none": 0.6788278672617892,
						"acc_stderr,none": 0.006505202676138953,
						"alias": " - lambada_standard",
						"perplexity,none": 4.31288567701395,
						"perplexity_stderr,none": 0.09201358015331328
					},
					"logiqa": {
						"acc,none": 0.21505376344086022,
						"acc_norm,none": 0.2642089093701997,
						"acc_norm_stderr,none": 0.017293954549744518,
						"acc_stderr,none": 0.016115240864129177,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3045150263495229,
						"acc_stderr,none": 0.04953555240610539,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653697,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.26973684210526316,
						"acc_stderr,none": 0.03611780560284898,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.35471698113207545,
						"acc_stderr,none": 0.029445175328199586,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3402777777777778,
						"acc_stderr,none": 0.039621355734862175,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3352601156069364,
						"acc_stderr,none": 0.03599586301247077,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421296,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3574468085106383,
						"acc_stderr,none": 0.03132941789476425,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748142,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.037245636197746325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.02286083830923207,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939098,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3967741935483871,
						"acc_stderr,none": 0.027831231605767944,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.031089826002937523,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.30303030303030304,
						"acc_stderr,none": 0.035886248000917075,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.37373737373737376,
						"acc_stderr,none": 0.034468977386593325,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.40414507772020725,
						"acc_stderr,none": 0.0354150857888402,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28717948717948716,
						"acc_stderr,none": 0.02293992541853061,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.025644108639267624,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.02934457250063435,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.033367670865679766,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3192660550458716,
						"acc_stderr,none": 0.01998782906975001,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093933,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.35784313725490197,
						"acc_stderr,none": 0.033644872860882996,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03068582059661081,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.28969181721572795,
						"acc_stderr,none": 0.03824850282109334,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.042943408452120954,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.04524596007030048,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3312883435582822,
						"acc_stderr,none": 0.03697983910025589,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.04157751539865629,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.3883495145631068,
						"acc_stderr,none": 0.048257293373563895,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3717948717948718,
						"acc_stderr,none": 0.03166098891888078,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.38058748403575987,
						"acc_stderr,none": 0.01736256412607543,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3092485549132948,
						"acc_stderr,none": 0.02488314057007176,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103984,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.02633661346904663,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3392339877695526,
						"acc_stderr,none": 0.04288143285779722,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.36012861736334406,
						"acc_stderr,none": 0.02726429759980402,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.30246913580246915,
						"acc_stderr,none": 0.025557653981868045,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2627118644067797,
						"acc_stderr,none": 0.011240545514995667,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.30514705882352944,
						"acc_stderr,none": 0.0279715413701706,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3022875816993464,
						"acc_stderr,none": 0.018579232711113888,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22857142857142856,
						"acc_stderr,none": 0.026882144922307744,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3149171270718232,
						"acc_stderr,none": 0.048141873697114604,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.38308457711442784,
						"acc_stderr,none": 0.03437519337338252,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28227085315572475,
						"acc_stderr,none": 0.05990992504262538,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.035509201856896294,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.03660298834049162,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.334793683138054,
						"acc_stderr,none": 0.004763693195633613,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3363506916192026,
						"acc_stderr,none": 0.0047650409531257015,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7475490196078431,
						"acc_stderr,none": 0.021533328427066328,
						"alias": " - mrpc",
						"f1,none": 0.8308702791461412,
						"f1_stderr,none": 0.016453659366850892
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.09279778393351801,
						"exact_match_stderr,remove_whitespace": 0.004829780711466167
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.020629569998345393,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4335,
						"acc_stderr,none": 0.01108378546120757,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.010695756149043481,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.011115272135099212,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886322,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.01116800618647258,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.011178751372184865,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48221428571428565,
						"acc_stderr,none": 0.057434463371788064,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7682263329706203,
						"acc_norm,none": 0.7774755168661589,
						"acc_norm_stderr,none": 0.009704600975718243,
						"acc_stderr,none": 0.009845143772794029,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7232217829104947,
						"acc_norm,none": 0.6303705111861454,
						"acc_norm_stderr,none": 0.010208178284621705,
						"acc_stderr,none": 0.1602628911159029,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348094282831127,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.55273262940533,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3839058841861647,
						"perplexity_stderr,none": 0.06685814984543348,
						"word_perplexity,none": 10.516713929892898,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5043016657514187,
						"acc_stderr,none": 0.006765160168388139,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5775661637397972,
						"acc_stderr,none": 0.0024565958859106172,
						"alias": " - qqp",
						"f1,none": 0.6292895747867422,
						"f1_stderr,none": 0.0026353269439925824
					},
					"record": {
						"alias": "record",
						"em,none": 0.2797,
						"em_stderr,none": 0.00448874212676432,
						"f1,none": 0.28987857167720793,
						"f1_stderr,none": 0.004496925117999675
					},
					"rte": {
						"acc,none": 0.6498194945848376,
						"acc_stderr,none": 0.028713610811000382,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.948,
						"acc_norm,none": 0.927,
						"acc_norm_stderr,none": 0.008230354715244081,
						"acc_stderr,none": 0.007024624213817146,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8818807339449541,
						"acc_stderr,none": 0.010935942543422823,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.30634483522785527,
						"acc_stderr,none": 0.0013926130044284689,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487361016,
						"bleu_diff,none": -9.386974915039055,
						"bleu_diff_stderr,none": 0.866556280635364,
						"bleu_max,none": 25.948669712165213,
						"bleu_max_stderr,none": 0.7963384311288975,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476196,
						"rouge1_diff,none": -11.811086736186299,
						"rouge1_diff_stderr,none": 0.9239436174679174,
						"rouge1_max,none": 50.80703114109932,
						"rouge1_max_stderr,none": 0.8861622869607654,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -14.2006556317876,
						"rouge2_diff_stderr,none": 1.113872956349568,
						"rouge2_max,none": 34.45515931973113,
						"rouge2_max_stderr,none": 1.0274050524758227,
						"rougeL_acc,none": 0.2594859241126071,
						"rougeL_acc_stderr,none": 0.015345409485557973,
						"rougeL_diff,none": -12.19469458488383,
						"rougeL_diff_stderr,none": 0.9305925303401709,
						"rougeL_max,none": 47.90848096362526,
						"rougeL_max_stderr,none": 0.9049994754821786
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487361016,
						"bleu_diff,none": -9.386974915039055,
						"bleu_diff_stderr,none": 0.866556280635364,
						"bleu_max,none": 25.948669712165213,
						"bleu_max_stderr,none": 0.7963384311288975,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476196,
						"rouge1_diff,none": -11.811086736186299,
						"rouge1_diff_stderr,none": 0.9239436174679174,
						"rouge1_max,none": 50.80703114109932,
						"rouge1_max_stderr,none": 0.8861622869607654,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -14.2006556317876,
						"rouge2_diff_stderr,none": 1.113872956349568,
						"rouge2_max,none": 34.45515931973113,
						"rouge2_max_stderr,none": 1.0274050524758227,
						"rougeL_acc,none": 0.2594859241126071,
						"rougeL_acc_stderr,none": 0.015345409485557973,
						"rougeL_diff,none": -12.19469458488383,
						"rougeL_diff_stderr,none": 0.9305925303401709,
						"rougeL_max,none": 47.90848096362526,
						"rougeL_max_stderr,none": 0.9049994754821786
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23745410036719705,
						"acc_stderr,none": 0.014896277441041838,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3752355700885135,
						"acc_stderr,none": 0.01375350277652699,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6348094282831127,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.55273262940533,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.516713929892898,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6716653512233622,
						"acc_stderr,none": 0.013198299449717886,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.0492300107297805,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6187272727272727,
						"acc_stderr,none": 0.07173745845333644,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.021948929609938612,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.019874354831287484,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.01982771485958757,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.02166071034720448,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.0203950954849366,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724788,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43488621151271756,
						"acc_stderr,none": 0.04991122649696771,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337345,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4634538152610442,
						"acc_stderr,none": 0.009995265580368909,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.009765326832218988,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5277108433734939,
						"acc_stderr,none": 0.010006669313970314,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4955823293172691,
						"acc_stderr,none": 0.010021681681769354,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955253,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42570281124497994,
						"acc_stderr,none": 0.009910810127822826,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4867469879518072,
						"acc_stderr,none": 0.010018551648218457,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.00977996757994179,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42369477911646586,
						"acc_stderr,none": 0.0099046785408289,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4578313253012048,
						"acc_stderr,none": 0.009986366819196478,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40682730923694777,
						"acc_stderr,none": 0.009846529240598867,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.009806220246670024,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568398,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6225858853257926,
						"acc_stderr,none": 0.06282041343261932,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5909993381866314,
						"acc_stderr,none": 0.012652228567132372,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7749834546657842,
						"acc_stderr,none": 0.010746448655964481,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7048312375909993,
						"acc_stderr,none": 0.01173786999944211,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.01277447516071634,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6042356055592323,
						"acc_stderr,none": 0.012584415320654355,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6598279285241562,
						"acc_stderr,none": 0.012192034998028832,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5188616810059563,
						"acc_stderr,none": 0.01285796676246499,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6790205162144275,
						"acc_stderr,none": 0.012014110213469816,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5519523494374586,
						"acc_stderr,none": 0.012797478885304742,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.012661578894368943,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.614824619457313,
						"acc_stderr,none": 0.012523231571141193,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8105192178017532,
						"acc_stderr,none": 0.03747749626431369,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8679569892473118,
						"acc_stderr,none": 0.007022451518434629,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353827,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944937,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.026726874754294035,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7896825396825397,
						"acc_stderr,none": 0.01817104649769028,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/1_0_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6440248027057497,
						"acc_norm,none": 0.6375422773393461,
						"acc_norm_stderr,none": 0.08590505067307808,
						"acc_stderr,none": 0.10724464660610677,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4734375,
						"acc_stderr,none": 0.056648696121819386,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.20335181992639742,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8228358208955224,
						"acc_stderr,none": 0.16163190631666544,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2763744427934622,
						"acc_norm,none": 0.2763744427934622,
						"acc_norm_stderr,none": 0.12093283782238075,
						"acc_stderr,none": 0.12093283782238075,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3006389224658954,
						"acc_norm,none": 0.3006389224658954,
						"acc_norm_stderr,none": 0.056411844725681275,
						"acc_stderr,none": 0.056411844725681275,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373173822301728,
						"likelihood_diff_stderr,none": 0.5327162538113843,
						"pct_stereotype,none": 0.6177698270721528,
						"pct_stereotype_stderr,none": 0.06760742841256165
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12450787401574803,
						"exact_match_stderr,none": 0.007326044786419023
					},
					"glue": {
						"acc,none": 0.7468437351119581,
						"acc_stderr,none": 0.004647103645416364,
						"alias": "glue",
						"f1,none": 0.688406848367625,
						"f1_stderr,none": 0.00015325048490247542,
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"kmmlu": {
						"acc,none": 0.2536817788045047,
						"acc_norm,none": 0.2536817788045047,
						"acc_norm_stderr,none": 0.022361771070018573,
						"acc_stderr,none": 0.022361771070018573,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5608419206314406,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.05413823602594502,
						"alias": "kobest",
						"f1,none": 0.5168425089886489,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7169609935959635,
						"acc_stderr,none": 0.022196681179743962,
						"alias": "lambada",
						"perplexity,none": 3.6140528967962298,
						"perplexity_stderr,none": 0.24652841873464582
					},
					"lambada_cloze": {
						"acc,none": 0.0361925092179313,
						"acc_stderr,none": 0.00316071616136545,
						"alias": "lambada_cloze",
						"perplexity,none": 521.6564771337919,
						"perplexity_stderr,none": 93.6747242720253
					},
					"lambada_multilingual": {
						"acc,none": 0.5439937900252281,
						"acc_stderr,none": 0.08772152576468445,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.904988395755094,
						"perplexity_stderr,none": 8.280704599420616
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09027862756450644,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.39064824654622743,
						"acc_stderr,none": 0.096048292049521,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.46057289990344386,
						"acc_stderr,none": 0.08539426945163622,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46376340591485216,
						"acc_stderr,none": 0.07434625886544384,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07043041013199013,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.38992193044712564,
						"acc_norm,none": 0.3634476885805383,
						"acc_norm_stderr,none": 0.00011371629374220891,
						"acc_stderr,none": 0.08070470603309188,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4853571428571429,
						"acc_stderr,none": 0.047852612904459205,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.746678153587985,
						"acc_norm,none": 0.6414480664443043,
						"acc_norm_stderr,none": 0.009471016879550561,
						"acc_stderr,none": 0.15315909608413356,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339933528504801,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518545584363792,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.142745283508757,
						"perplexity_stderr,none": 0.060784045889904346,
						"word_perplexity,none": 10.484950606251948,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.05921656033438769,
						"acc_stderr,none": 0.044889894198871454,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6748194735616119,
						"acc_stderr,none": 0.06325286648771192,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34064905817472046,
						"acc_stderr,none": 0.001498200094875688,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3390452876376989,
						"bleu_acc_stderr,none": 0.016571797910626608,
						"bleu_diff,none": -5.972104217346434,
						"bleu_diff_stderr,none": 0.8792613229153681,
						"bleu_max,none": 27.203565363565886,
						"bleu_max_stderr,none": 0.7936807032535071,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -7.358205029036453,
						"rouge1_diff_stderr,none": 0.9656377889853766,
						"rouge1_max,none": 52.80755598148197,
						"rouge1_max_stderr,none": 0.8505859616794308,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024647,
						"rouge2_diff,none": -8.883936685112145,
						"rouge2_diff_stderr,none": 1.1594646666952948,
						"rouge2_max,none": 37.29912934907355,
						"rouge2_max_stderr,none": 1.0014935705415513,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713627,
						"rougeL_diff,none": -7.639846565154001,
						"rougeL_diff_stderr,none": 0.9924318394063312,
						"rougeL_max,none": 49.900538797915495,
						"rougeL_max_stderr,none": 0.8684219125995093
					},
					"xcopa": {
						"acc,none": 0.6199999999999999,
						"acc_stderr,none": 0.06740995761085669,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43180722891566264,
						"acc_stderr,none": 0.046774996360704875,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6313097888213706,
						"acc_stderr,none": 0.06301384286183202,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03905231772846098,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6440248027057497,
						"acc_norm,none": 0.6375422773393461,
						"acc_norm_stderr,none": 0.08590505067307808,
						"acc_stderr,none": 0.10724464660610677,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4734375,
						"acc_stderr,none": 0.056648696121819386,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.428,
						"acc_stderr,none": 0.015654426245029284,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.41333333333333333,
						"acc_stderr,none": 0.014221202817696512,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41723549488054607,
						"acc_norm,none": 0.4564846416382253,
						"acc_norm_stderr,none": 0.014555949760496435,
						"acc_stderr,none": 0.014409825518403079,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7558922558922558,
						"acc_norm,none": 0.7268518518518519,
						"acc_norm_stderr,none": 0.009143032718360347,
						"acc_stderr,none": 0.008814322157999389,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.20335181992639742,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.4575,
						"acc_stderr,none": 0.011142663706548622,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.009658915432058835,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.249,
						"acc_stderr,none": 0.009671932233869848,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9955,
						"acc_stderr,none": 0.0014969954902233175,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.292,
						"acc_stderr,none": 0.010169548163754639,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.010961732517713438,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.271,
						"acc_stderr,none": 0.00994127328148805,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.4215,
						"acc_stderr,none": 0.01104444950789628,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.289,
						"acc_stderr,none": 0.010138584489351777,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.2315,
						"acc_stderr,none": 0.0094338949637514,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8228358208955224,
						"acc_stderr,none": 0.16163190631666544,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448786,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524303,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286442,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659973,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220473,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704163,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103336,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346931,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256581,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295448,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666654,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919315,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557422,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178334,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474916,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816324,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732965,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852627,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557421,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397227,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306475,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.468,
						"acc_stderr,none": 0.015786868759359005,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139973,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973418,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.595,
						"acc_stderr,none": 0.015531136990453043,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809961,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.00868051561552371,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333438,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753655,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866434,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804476,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557424,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.014205696104091507,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706627,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920673,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099179,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139993,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.01272607374459826,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.00173031615434694,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653886,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389601,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.013790038620872842,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968109,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897878,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695794,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.015605111967541944,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.01578886595953901,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936724,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.0058939578161655674,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652699,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475286,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783217,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.297,
						"acc_stderr,none": 0.014456832294801103,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177897,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592072,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423526,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081365,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832028,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911077,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.379,
						"acc_stderr,none": 0.015349091002225349,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7180428134556575,
						"acc_stderr,none": 0.007869720238684475,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.7009189640768588,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2763744427934622,
						"acc_norm,none": 0.2763744427934622,
						"acc_norm_stderr,none": 0.12093283782238075,
						"acc_stderr,none": 0.12093283782238075,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3006389224658954,
						"acc_norm,none": 0.3006389224658954,
						"acc_norm_stderr,none": 0.056411844725681275,
						"acc_stderr,none": 0.056411844725681275,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745934,
						"acc_stderr,none": 0.03283906353745934,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3126934984520124,
						"acc_norm,none": 0.3126934984520124,
						"acc_norm_stderr,none": 0.02583489590078724,
						"acc_stderr,none": 0.02583489590078724,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3687150837988827,
						"acc_norm,none": 0.3687150837988827,
						"acc_norm_stderr,none": 0.036161643250458134,
						"acc_stderr,none": 0.036161643250458134,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189597,
						"acc_stderr,none": 0.04794743635189597,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614584,
						"acc_stderr,none": 0.04429811949614584,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.02671881407296754,
						"acc_stderr,none": 0.02671881407296754,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.03296245110172229,
						"acc_stderr,none": 0.03296245110172229,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.27485380116959063,
						"acc_norm,none": 0.27485380116959063,
						"acc_norm_stderr,none": 0.034240429246915824,
						"acc_stderr,none": 0.034240429246915824,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3270440251572327,
						"acc_norm,none": 0.3270440251572327,
						"acc_norm_stderr,none": 0.0373222564649312,
						"acc_stderr,none": 0.0373222564649312,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3558282208588957,
						"acc_norm,none": 0.3558282208588957,
						"acc_norm_stderr,none": 0.03761521380046734,
						"acc_stderr,none": 0.03761521380046734,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.032145368597886394,
						"acc_stderr,none": 0.032145368597886394,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.25217391304347825,
						"acc_norm,none": 0.25217391304347825,
						"acc_norm_stderr,none": 0.02869674529449336,
						"acc_stderr,none": 0.02869674529449336,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395385,
						"acc_stderr,none": 0.03242041613395385,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.03644669348694787,
						"acc_stderr,none": 0.03644669348694787,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.02192746197287115,
						"acc_stderr,none": 0.02192746197287115,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.033921125520669684,
						"acc_stderr,none": 0.033921125520669684,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.032833210696431546,
						"acc_stderr,none": 0.032833210696431546,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.03464390125743289,
						"acc_stderr,none": 0.03464390125743289,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752742,
						"acc_stderr,none": 0.03424737867752742,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426438,
						"acc_stderr,none": 0.022743327388426438,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.35344827586206895,
						"acc_norm,none": 0.35344827586206895,
						"acc_norm_stderr,none": 0.031452746950022696,
						"acc_stderr,none": 0.031452746950022696,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326118,
						"acc_stderr,none": 0.031061820840326118,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.03663974994391242,
						"acc_stderr,none": 0.03663974994391242,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.03616379286462019,
						"acc_stderr,none": 0.03616379286462019,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.036955560385363254,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373173822301728,
						"likelihood_diff_stderr,none": 0.5327162538113843,
						"pct_stereotype,none": 0.6177698270721528,
						"pct_stereotype_stderr,none": 0.06760742841256165
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.9213625521765056,
						"likelihood_diff_stderr,none": 0.09225031794556265,
						"pct_stereotype,none": 0.6499701848539058,
						"pct_stereotype_stderr,none": 0.011650973912575054
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.041208791208791,
						"likelihood_diff_stderr,none": 0.4121368001118892,
						"pct_stereotype,none": 0.6593406593406593,
						"pct_stereotype_stderr,none": 0.04995670951276871
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.715909090909091,
						"likelihood_diff_stderr,none": 1.4542346969175948,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.061538461538461,
						"likelihood_diff_stderr,none": 0.6177227965700853,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.837109375,
						"likelihood_diff_stderr,none": 0.16662718987982708,
						"pct_stereotype,none": 0.61875,
						"pct_stereotype_stderr,none": 0.027193630402775476
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.881365740740741,
						"likelihood_diff_stderr,none": 0.25948861754988745,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.479166666666667,
						"likelihood_diff_stderr,none": 0.390241889594603,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.80880905511811,
						"likelihood_diff_stderr,none": 0.16711400721430109,
						"pct_stereotype,none": 0.5748031496062992,
						"pct_stereotype_stderr,none": 0.021955867910832084
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.9436936936936937,
						"likelihood_diff_stderr,none": 0.3590903668024958,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.125,
						"likelihood_diff_stderr,none": 0.4815282425157905,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364003
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.345394736842105,
						"likelihood_diff_stderr,none": 0.2515012898291121,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3508124627310676,
						"likelihood_diff_stderr,none": 0.07852461531730219,
						"pct_stereotype,none": 0.5855694692903995,
						"pct_stereotype_stderr,none": 0.012033115254328987
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.6694444444444443,
						"likelihood_diff_stderr,none": 0.3211448528423643,
						"pct_stereotype,none": 0.5888888888888889,
						"pct_stereotype_stderr,none": 0.052155640611075534
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8653846153846154,
						"likelihood_diff_stderr,none": 0.8532626789043604,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.901515151515151,
						"likelihood_diff_stderr,none": 0.4746142706878124,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.769470404984424,
						"likelihood_diff_stderr,none": 0.13327267141158228,
						"pct_stereotype,none": 0.6230529595015576,
						"pct_stereotype_stderr,none": 0.02709116375533661
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4970355731225298,
						"likelihood_diff_stderr,none": 0.20228846624438102,
						"pct_stereotype,none": 0.4268774703557312,
						"pct_stereotype_stderr,none": 0.031158395621279214
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4479166666666665,
						"likelihood_diff_stderr,none": 0.4341670355625479,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.135869565217391,
						"likelihood_diff_stderr,none": 0.1600658366042133,
						"pct_stereotype,none": 0.48043478260869565,
						"pct_stereotype_stderr,none": 0.02332012708760827
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.1847826086956523,
						"likelihood_diff_stderr,none": 0.27422811296045174,
						"pct_stereotype,none": 0.7652173913043478,
						"pct_stereotype_stderr,none": 0.039698395317531235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.381868131868132,
						"likelihood_diff_stderr,none": 0.31734409561304555,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.055803571428571,
						"likelihood_diff_stderr,none": 0.24925499562442802,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.03252156607969809
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12450787401574803,
						"exact_match_stderr,none": 0.007326044786419023
					},
					"glue": {
						"acc,none": 0.7468437351119581,
						"acc_stderr,none": 0.004647103645416364,
						"alias": "glue",
						"f1,none": 0.688406848367625,
						"f1_stderr,none": 0.00015325048490247542,
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"hellaswag": {
						"acc,none": 0.5535749850627365,
						"acc_norm,none": 0.7428799044015136,
						"acc_norm_stderr,none": 0.004361529679492746,
						"acc_stderr,none": 0.004961054589573467,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2536817788045047,
						"acc_norm,none": 0.2536817788045047,
						"acc_norm_stderr,none": 0.022361771070018573,
						"acc_stderr,none": 0.022361771070018573,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.01349300044693759,
						"acc_stderr,none": 0.01349300044693759,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.01366318713487765,
						"acc_stderr,none": 0.01366318713487765,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872823,
						"acc_stderr,none": 0.013790038620872823,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.017532332270077985,
						"acc_stderr,none": 0.017532332270077985,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515436,
						"acc_stderr,none": 0.013531522534515436,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.01451239503354315,
						"acc_stderr,none": 0.01451239503354315,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247123,
						"acc_stderr,none": 0.013414729030247123,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.031828687164775826,
						"acc_stderr,none": 0.031828687164775826,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542127,
						"acc_stderr,none": 0.04512608598542127,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577952,
						"acc_stderr,none": 0.013454070462577952,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633913,
						"acc_stderr,none": 0.014046255632633913,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804478,
						"acc_stderr,none": 0.013273740700804478,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881412,
						"acc_stderr,none": 0.013588548437881412,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440476,
						"acc_stderr,none": 0.013946271849440476,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881423,
						"acc_stderr,none": 0.013588548437881423,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542126,
						"acc_stderr,none": 0.04512608598542126,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750648,
						"acc_stderr,none": 0.013626065817750648,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145155,
						"acc_stderr,none": 0.013979965645145155,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888938,
						"acc_stderr,none": 0.013718133516888938,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804481,
						"acc_stderr,none": 0.013273740700804481,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651154,
						"acc_stderr,none": 0.013736254390651154,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281581,
						"acc_stderr,none": 0.013354937452281581,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.01773156149490717,
						"acc_stderr,none": 0.01773156149490717,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796273,
						"acc_stderr,none": 0.013996674851796273,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145153,
						"acc_stderr,none": 0.013979965645145153,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.013607356839598118,
						"acc_stderr,none": 0.013607356839598118,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.01417451646148525,
						"acc_stderr,none": 0.01417451646148525,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.02536687329706923,
						"acc_stderr,none": 0.02536687329706923,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462613,
						"acc_stderr,none": 0.014078856992462613,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515424,
						"acc_stderr,none": 0.013531522534515424,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895042,
						"acc_stderr,none": 0.013825416526895042,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.01329419932661359,
						"acc_stderr,none": 0.01329419932661359,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809947,
						"acc_stderr,none": 0.013963164754809947,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.02983202555549525,
						"acc_stderr,none": 0.02983202555549525,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477336,
						"acc_stderr,none": 0.014385511563477336,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5608419206314406,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.05413823602594502,
						"alias": "kobest",
						"f1,none": 0.5168425089886489,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6210826210826211,
						"acc_stderr,none": 0.012951441710828772,
						"alias": " - kobest_boolq",
						"f1,none": 0.6111856077957774,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154622,
						"alias": " - kobest_copa",
						"f1,none": 0.6301830744113748,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.422,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.02228814759117695,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4183444300121193,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.025004412942296043,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4478785477317048,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.49682539682539684,
						"acc_stderr,none": 0.014091213185340047,
						"alias": " - kobest_wic",
						"f1,none": 0.38258017244844017,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7169609935959635,
						"acc_stderr,none": 0.022196681179743962,
						"alias": "lambada",
						"perplexity,none": 3.6140528967962298,
						"perplexity_stderr,none": 0.24652841873464582
					},
					"lambada_cloze": {
						"acc,none": 0.0361925092179313,
						"acc_stderr,none": 0.00316071616136545,
						"alias": "lambada_cloze",
						"perplexity,none": 521.6564771337919,
						"perplexity_stderr,none": 93.6747242720253
					},
					"lambada_multilingual": {
						"acc,none": 0.5439937900252281,
						"acc_stderr,none": 0.08772152576468445,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.904988395755094,
						"perplexity_stderr,none": 8.280704599420616
					},
					"lambada_openai": {
						"acc,none": 0.7587812924509995,
						"acc_stderr,none": 0.005960406413916588,
						"alias": " - lambada_openai",
						"perplexity,none": 3.142745283508757,
						"perplexity_stderr,none": 0.060784045889904346
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03978265088298079,
						"acc_stderr,none": 0.0027229753280860617,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 338.0551414821208,
						"perplexity_stderr,none": 11.581803933669871
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4302348146710654,
						"acc_stderr,none": 0.006897835015074963,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.67571265394163,
						"perplexity_stderr,none": 1.9481192457595293
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7581991073161265,
						"acc_stderr,none": 0.005965305048434235,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1448129106483345,
						"perplexity_stderr,none": 0.06080104854020486
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4574034542984669,
						"acc_stderr,none": 0.00694065256687139,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.477147661932488,
						"perplexity_stderr,none": 1.4113304031830556
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5532699398408694,
						"acc_stderr,none": 0.0069263303079770315,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.42822336206449,
						"perplexity_stderr,none": 0.8068021436774651
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5208616339996118,
						"acc_stderr,none": 0.006959911720851461,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.79904539018854,
						"perplexity_stderr,none": 1.1679185444612057
					},
					"lambada_standard": {
						"acc,none": 0.6743644478944304,
						"acc_stderr,none": 0.0065286789578354616,
						"alias": " - lambada_standard",
						"perplexity,none": 4.084886717912662,
						"perplexity_stderr,none": 0.08371004549899955
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03260236755288182,
						"acc_stderr,none": 0.0024742247822429524,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 705.2578127854631,
						"perplexity_stderr,none": 23.653250250235494
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.017602909186822453,
						"acc_stderr,none": 0.016887410894296944,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.29961832061068705,
						"acc_norm_stderr,none": 0.011557488735539885,
						"acc_stderr,none": 0.011060275310259939,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25896147403685094,
						"acc_norm,none": 0.2659966499162479,
						"acc_norm_stderr,none": 0.008088867008866097,
						"acc_stderr,none": 0.008019338828219907,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5079432323660241,
						"acc_stderr,none": 0.005145246608139393,
						"alias": "mc_taco",
						"f1,none": 0.5511978361669243,
						"f1_stderr,none": 0.005896465719453553
					},
					"medmcqa": {
						"acc,none": 0.35524743007410947,
						"acc_norm,none": 0.35524743007410947,
						"acc_norm_stderr,none": 0.0074006522761731535,
						"acc_stderr,none": 0.0074006522761731535,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3739198743126473,
						"acc_norm,none": 0.3739198743126473,
						"acc_norm_stderr,none": 0.01356627691828067,
						"acc_stderr,none": 0.01356627691828067,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09027862756450644,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4074074074074074,
						"acc_stderr,none": 0.04244633238353229,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749193,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411019,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03015113445777629,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4097222222222222,
						"acc_stderr,none": 0.04112490974670787,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.03583901754736411,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3617021276595745,
						"acc_stderr,none": 0.03141082197596239,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.45517241379310347,
						"acc_stderr,none": 0.04149886942192117,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.023919984164047742,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.44516129032258067,
						"acc_stderr,none": 0.02827241018621491,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3399014778325123,
						"acc_stderr,none": 0.033327690684107895,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5636363636363636,
						"acc_stderr,none": 0.03872592983524754,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.46464646464646464,
						"acc_stderr,none": 0.03553436368828063,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5595854922279793,
						"acc_stderr,none": 0.03582724530036095,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.37435897435897436,
						"acc_stderr,none": 0.024537591572830506,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.31851851851851853,
						"acc_stderr,none": 0.02840653309060846,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36134453781512604,
						"acc_stderr,none": 0.031204691225150016,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.03511807571804723,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5284403669724771,
						"acc_stderr,none": 0.021402615697348044,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2175925925925926,
						"acc_stderr,none": 0.028139689444859676,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5196078431372549,
						"acc_stderr,none": 0.03506612560524866,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5864978902953587,
						"acc_stderr,none": 0.03205649904851859,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.42152466367713004,
						"acc_stderr,none": 0.03314190222110657,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.043851623256015534,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.39064824654622743,
						"acc_stderr,none": 0.096048292049521,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4628099173553719,
						"acc_stderr,none": 0.04551711196104218,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5092592592592593,
						"acc_stderr,none": 0.04832853553437056,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3987730061349693,
						"acc_stderr,none": 0.03847021420456023,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291518,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5436893203883495,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.03193705726200293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5964240102171137,
						"acc_stderr,none": 0.017544332237926424,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.02653818910470548,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23128491620111732,
						"acc_stderr,none": 0.014102223623152577,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4673202614379085,
						"acc_stderr,none": 0.028568699752225868,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.46057289990344386,
						"acc_stderr,none": 0.08539426945163622,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5337620578778135,
						"acc_stderr,none": 0.02833327710956278,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.48148148148148145,
						"acc_stderr,none": 0.027801656212323667,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3191489361702128,
						"acc_stderr,none": 0.0278079901413202,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3272490221642764,
						"acc_stderr,none": 0.011983819806464747,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.029520095697687765,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4395424836601307,
						"acc_stderr,none": 0.020079420408087915,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.40816326530612246,
						"acc_stderr,none": 0.03146465712827424,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46376340591485216,
						"acc_stderr,none": 0.07434625886544384,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6467661691542289,
						"acc_stderr,none": 0.03379790611796777,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07043041013199013,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6374269005847953,
						"acc_stderr,none": 0.0368713061556206,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7941925624044829,
						"acc_stderr,none": 0.004081035886294968,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7951586655817738,
						"acc_stderr,none": 0.004070397737448466,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.696078431372549,
						"acc_stderr,none": 0.022798834443163555,
						"alias": "mrpc",
						"f1,none": 0.8176470588235294,
						"f1_stderr,none": 0.01609184235033859
					},
					"multimedqa": {
						"acc,none": 0.38992193044712564,
						"acc_norm,none": 0.3634476885805383,
						"acc_norm_stderr,none": 0.00011371629374220891,
						"acc_stderr,none": 0.08070470603309188,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5662128712871287,
						"acc_stderr,none": 0.007118552456859644,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7102144484998948,
						"mrr_stderr,none": 0.010334435914923234,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41196388261851014,
						"r@2_stderr,none": 0.01654473961960943
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6578254342819175,
						"mrr_stderr,none": 0.010454987139158683,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.016757741478801033
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.022080014812228137,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4425,
						"acc_stderr,none": 0.011108941411747607,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.010909147755547948,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.010992197878818593,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.011137752231145218,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5425,
						"acc_stderr,none": 0.011142663706548624,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5345,
						"acc_stderr,none": 0.011156482803925168,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.01115570369194311,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4853571428571429,
						"acc_stderr,none": 0.047852612904459205,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7812840043525572,
						"acc_norm,none": 0.7878128400435256,
						"acc_norm_stderr,none": 0.009539299828174062,
						"acc_stderr,none": 0.00964473193266758,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2510140905209223,
						"acc_norm,none": 0.288535439795047,
						"acc_norm_stderr,none": 0.0033101629160730913,
						"acc_stderr,none": 0.0031678092713174795,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.746678153587985,
						"acc_norm,none": 0.6414480664443043,
						"acc_norm_stderr,none": 0.009471016879550561,
						"acc_stderr,none": 0.15315909608413356,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339933528504801,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518545584363792,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.142745283508757,
						"perplexity_stderr,none": 0.060784045889904346,
						"word_perplexity,none": 10.484950606251948,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.05921656033438769,
						"acc_stderr,none": 0.044889894198871454,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.40492957746478875,
						"acc_norm,none": 0.41901408450704225,
						"acc_norm_stderr,none": 0.029329448381681836,
						"acc_stderr,none": 0.029179692752203355,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7557259460796438,
						"acc_stderr,none": 0.002136851474248549,
						"alias": "qqp",
						"f1,none": 0.6873100303951368,
						"f1_stderr,none": 0.0029879508793415857
					},
					"race": {
						"acc,none": 0.3492822966507177,
						"acc_stderr,none": 0.014754834713104478,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2622,
						"em_stderr,none": 0.004398528243068212,
						"f1,none": 0.27193523834049704,
						"f1_stderr,none": 0.004411570223398709
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.956,
						"acc_norm,none": 0.944,
						"acc_norm_stderr,none": 0.007274401481697071,
						"acc_stderr,none": 0.00648892179842742,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170357,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.01120598290257748,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5864740577826651,
						"acc_norm,none": 0.77221833449965,
						"acc_norm_stderr,none": 0.002965242106965758,
						"acc_stderr,none": 0.003481821519873869,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6748194735616119,
						"acc_stderr,none": 0.06325286648771192,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6227964743589743,
						"acc_stderr,none": 0.004850990328859875,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8430120604033647,
						"acc_stderr,none": 0.0036625155995172196,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5630392156862745,
						"acc_stderr,none": 0.00491147296156521,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34064905817472046,
						"acc_stderr,none": 0.001498200094875688,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3390452876376989,
						"bleu_acc_stderr,none": 0.016571797910626608,
						"bleu_diff,none": -5.972104217346434,
						"bleu_diff_stderr,none": 0.8792613229153681,
						"bleu_max,none": 27.203565363565886,
						"bleu_max_stderr,none": 0.7936807032535071,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -7.358205029036453,
						"rouge1_diff_stderr,none": 0.9656377889853766,
						"rouge1_max,none": 52.80755598148197,
						"rouge1_max_stderr,none": 0.8505859616794308,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024647,
						"rouge2_diff,none": -8.883936685112145,
						"rouge2_diff_stderr,none": 1.1594646666952948,
						"rouge2_max,none": 37.29912934907355,
						"rouge2_max_stderr,none": 1.0014935705415513,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713627,
						"rougeL_diff,none": -7.639846565154001,
						"rougeL_diff_stderr,none": 0.9924318394063312,
						"rougeL_max,none": 49.900538797915495,
						"rougeL_max_stderr,none": 0.8684219125995093
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3390452876376989,
						"bleu_acc_stderr,none": 0.016571797910626608,
						"bleu_diff,none": -5.972104217346434,
						"bleu_diff_stderr,none": 0.8792613229153681,
						"bleu_max,none": 27.203565363565886,
						"bleu_max_stderr,none": 0.7936807032535071,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -7.358205029036453,
						"rouge1_diff_stderr,none": 0.9656377889853766,
						"rouge1_max,none": 52.80755598148197,
						"rouge1_max_stderr,none": 0.8505859616794308,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024647,
						"rouge2_diff,none": -8.883936685112145,
						"rouge2_diff_stderr,none": 1.1594646666952948,
						"rouge2_max,none": 37.29912934907355,
						"rouge2_max_stderr,none": 1.0014935705415513,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713627,
						"rougeL_diff,none": -7.639846565154001,
						"rougeL_diff_stderr,none": 0.9924318394063312,
						"rougeL_max,none": 49.900538797915495,
						"rougeL_max_stderr,none": 0.8684219125995093
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2692778457772338,
						"acc_stderr,none": 0.015528566637087305,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4120202705722071,
						"acc_stderr,none": 0.014389358093424365,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.12450787401574803,
						"exact_match_stderr,none": 0.007326044786419023
					},
					"wic": {
						"acc,none": 0.5062695924764891,
						"acc_stderr,none": 0.019809163801196513,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6339933528504801,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518545584363792,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.484950606251948,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7269139700078927,
						"acc_stderr,none": 0.012522020105869456,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.048679937479186836,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070851,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6199999999999999,
						"acc_stderr,none": 0.06740995761085669,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988968,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290785,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561317,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.021461434862859122,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290778,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345393,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43180722891566264,
						"acc_stderr,none": 0.046774996360704875,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.00948525020851688,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.009999776793187627,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774337,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906763,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5269076305220883,
						"acc_stderr,none": 0.010007549970702514,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483494,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4779116465863454,
						"acc_stderr,none": 0.010012288645591784,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840175,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071305,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41044176706827307,
						"acc_stderr,none": 0.00985999467258512,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115701,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391928,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41244979919678715,
						"acc_stderr,none": 0.009867237678555586,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40160642570281124,
						"acc_stderr,none": 0.009826103601507121,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806038,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6313097888213706,
						"acc_stderr,none": 0.06301384286183202,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.012670716290966718,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7882197220383852,
						"acc_stderr,none": 0.010514241109625353,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7180675049636003,
						"acc_stderr,none": 0.011578884735064793,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5764394440767704,
						"acc_stderr,none": 0.012715871382881438,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751377,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6684315023163467,
						"acc_stderr,none": 0.0121150890818801,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5519523494374586,
						"acc_stderr,none": 0.012797478885304732,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6836532097948379,
						"acc_stderr,none": 0.011967713146973766,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5539377895433488,
						"acc_stderr,none": 0.012792037953589647,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5797485109199206,
						"acc_stderr,none": 0.012702405649149106,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6419589675711449,
						"acc_stderr,none": 0.012337624883487573,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03905231772846098,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907546,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.05006642805041921,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7424400417101147,
						"acc_stderr,none": 0.014128209029143982,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8174904942965779,
						"acc_stderr,none": 0.023863462284014612,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.0261980577440264,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7718253968253969,
						"acc_stderr,none": 0.018711525330668003,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0_pth"
	},
	"./rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-30_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6223224351747464,
						"acc_norm,none": 0.6200676437429538,
						"acc_norm_stderr,none": 0.0857236971218961,
						"acc_stderr,none": 0.10149948167994714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4475,
						"acc_stderr,none": 0.05187103629489488,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8153880597014925,
						"acc_stderr,none": 0.1633423887045504,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3002072180970472,
						"acc_norm,none": 0.3002072180970472,
						"acc_norm_stderr,none": 0.052473501369426766,
						"acc_stderr,none": 0.052473501369426766,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6956288708909004,
						"acc_stderr,none": 0.003805174234971218,
						"alias": "glue",
						"f1,none": 0.6887191563421935,
						"f1_stderr,none": 0.0001653135140877446,
						"mcc,none": 0.1883103190340171,
						"mcc_stderr,none": 0.0262812907905269
					},
					"lambada": {
						"acc,none": 0.6991073161265282,
						"acc_stderr,none": 0.025969269677272484,
						"alias": "lambada",
						"perplexity,none": 4.03043069663995,
						"perplexity_stderr,none": 0.40304461678042
					},
					"lambada_multilingual": {
						"acc,none": 0.5401125557927421,
						"acc_stderr,none": 0.08623041572761711,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.85535270175715,
						"perplexity_stderr,none": 8.2387272906129
					},
					"mmlu": {
						"acc,none": 0.36597350804728673,
						"acc_stderr,none": 0.07954926761608928,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3523910733262487,
						"acc_stderr,none": 0.0767171024538961,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4235597038944319,
						"acc_stderr,none": 0.07911482455386332,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.38446538836529087,
						"acc_stderr,none": 0.0663097696603997,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3114494132572153,
						"acc_stderr,none": 0.07202961117104478,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4820714285714286,
						"acc_stderr,none": 0.05687583484413734,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7328516146319968,
						"acc_norm,none": 0.6243976105740026,
						"acc_norm_stderr,none": 0.009437165654337793,
						"acc_stderr,none": 0.1539036691531295,
						"alias": "pythia",
						"bits_per_byte,none": 0.6352470906555451,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5532037447253297,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2416893989842417,
						"perplexity_stderr,none": 0.0626538019794447,
						"word_perplexity,none": 10.533788265986635,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3293439827869272,
						"acc_stderr,none": 0.001569751494002335,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -6.412410288083058,
						"bleu_diff_stderr,none": 0.8722384847438415,
						"bleu_max,none": 26.930015329130203,
						"bleu_max_stderr,none": 0.8085015917893001,
						"rouge1_acc,none": 0.2864137086903305,
						"rouge1_acc_stderr,none": 0.015826142439502353,
						"rouge1_diff,none": -8.522217598446215,
						"rouge1_diff_stderr,none": 0.9253507541541494,
						"rouge1_max,none": 52.23106959677384,
						"rouge1_max_stderr,none": 0.8725723538509482,
						"rouge2_acc,none": 0.26438188494492043,
						"rouge2_acc_stderr,none": 0.015438211119522495,
						"rouge2_diff,none": -10.04732723165974,
						"rouge2_diff_stderr,none": 1.1316617230538066,
						"rouge2_max,none": 36.33254780289301,
						"rouge2_max_stderr,none": 1.0256135646393412,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -8.707625480067987,
						"rougeL_diff_stderr,none": 0.9450801901077751,
						"rougeL_max,none": 49.387256832280386,
						"rougeL_max_stderr,none": 0.890587396141587
					},
					"xcopa": {
						"acc,none": 0.6247272727272727,
						"acc_stderr,none": 0.06961187297007985,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4380722891566265,
						"acc_stderr,none": 0.05216231318679556,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6282413813849949,
						"acc_stderr,none": 0.06292319963000094,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03836759408197828,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6223224351747464,
						"acc_norm,none": 0.6200676437429538,
						"acc_norm_stderr,none": 0.0857236971218961,
						"acc_stderr,none": 0.10149948167994714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4475,
						"acc_stderr,none": 0.05187103629489488,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.015730176046009074,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.015615500115072957,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3825,
						"acc_stderr,none": 0.014035394017899877,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40784982935153585,
						"acc_norm,none": 0.439419795221843,
						"acc_norm_stderr,none": 0.01450374782358013,
						"acc_stderr,none": 0.014361097288449693,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7281144781144782,
						"acc_norm,none": 0.7091750841750841,
						"acc_norm_stderr,none": 0.00931881592117665,
						"acc_stderr,none": 0.009129795867310492,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8153880597014925,
						"acc_stderr,none": 0.1633423887045504,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745921,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611431,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.01074366913239734,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796394,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.01272607374459827,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797579,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340971,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426583,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.0073351758537068285,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118757,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.0061250727764261105,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996698,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697596,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.00431945108291063,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248111,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.01425181090648175,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632902,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377939,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666704,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904609,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.015372453034968526,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597505,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.015658997547870247,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.01423652621529135,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695798,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696863,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.00872852720607479,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.643,
						"acc_stderr,none": 0.015158521721486766,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308498,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.015752210388771844,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498325,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302695,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515448,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286398,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099829,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597516,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858925,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571408,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.00384574957450301,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342763,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653893,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280311,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897068,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890145,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695794,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.01570498795436179,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235244,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.00676326413366666,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611484,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910612,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.328,
						"acc_stderr,none": 0.014853842487270333,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3002072180970472,
						"acc_norm,none": 0.3002072180970472,
						"acc_norm_stderr,none": 0.052473501369426766,
						"acc_stderr,none": 0.052473501369426766,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620373,
						"acc_stderr,none": 0.03794062549620373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.03315925698294869,
						"acc_stderr,none": 0.03315925698294869,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.039153454088478354,
						"acc_stderr,none": 0.039153454088478354,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.0402637721078731,
						"acc_stderr,none": 0.0402637721078731,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.32507739938080493,
						"acc_norm,none": 0.32507739938080493,
						"acc_norm_stderr,none": 0.02610312109754256,
						"acc_stderr,none": 0.02610312109754256,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.031493281045079556,
						"acc_stderr,none": 0.031493281045079556,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.33519553072625696,
						"acc_norm,none": 0.33519553072625696,
						"acc_norm_stderr,none": 0.035382301081428424,
						"acc_stderr,none": 0.035382301081428424,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123842,
						"acc_stderr,none": 0.04752784159123842,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.043842955869188835,
						"acc_stderr,none": 0.043842955869188835,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199629,
						"acc_stderr,none": 0.04439263906199629,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.29304029304029305,
						"acc_norm,none": 0.29304029304029305,
						"acc_norm_stderr,none": 0.027597932553584063,
						"acc_stderr,none": 0.027597932553584063,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617515,
						"acc_stderr,none": 0.03814280082617515,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735707,
						"acc_stderr,none": 0.03881956126735707,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.03680350371286461,
						"acc_stderr,none": 0.03680350371286461,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890494,
						"acc_stderr,none": 0.03361101403890494,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683864,
						"acc_stderr,none": 0.028145741115683864,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.2828282828282828,
						"acc_norm_stderr,none": 0.032087795587867514,
						"acc_stderr,none": 0.032087795587867514,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.02725685083881996,
						"acc_stderr,none": 0.02725685083881996,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03503824441133676,
						"acc_stderr,none": 0.03503824441133676,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878285,
						"acc_stderr,none": 0.04122066502878285,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928704,
						"acc_stderr,none": 0.039325376803928704,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3313953488372093,
						"acc_norm,none": 0.3313953488372093,
						"acc_norm_stderr,none": 0.03599646438179591,
						"acc_stderr,none": 0.03599646438179591,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2846715328467153,
						"acc_norm,none": 0.2846715328467153,
						"acc_norm_stderr,none": 0.022286036929717288,
						"acc_stderr,none": 0.022286036929717288,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.03331120297324246,
						"acc_stderr,none": 0.03331120297324246,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.031840067304739414,
						"acc_stderr,none": 0.031840067304739414,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.29444444444444445,
						"acc_norm,none": 0.29444444444444445,
						"acc_norm_stderr,none": 0.034067540013496884,
						"acc_stderr,none": 0.034067540013496884,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184864,
						"acc_stderr,none": 0.03363635410184864,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3310344827586207,
						"acc_norm,none": 0.3310344827586207,
						"acc_norm_stderr,none": 0.039215453124671215,
						"acc_stderr,none": 0.039215453124671215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290195,
						"acc_stderr,none": 0.034449526562290195,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.36637931034482757,
						"acc_norm,none": 0.36637931034482757,
						"acc_norm_stderr,none": 0.03170108710059699,
						"acc_stderr,none": 0.03170108710059699,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3505747126436782,
						"acc_norm,none": 0.3505747126436782,
						"acc_norm_stderr,none": 0.03627703962615275,
						"acc_stderr,none": 0.03627703962615275,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326115,
						"acc_stderr,none": 0.031061820840326115,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1883103190340171,
						"mcc_stderr,none": 0.0262812907905269
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6956288708909004,
						"acc_stderr,none": 0.003805174234971218,
						"alias": "glue",
						"f1,none": 0.6887191563421935,
						"f1_stderr,none": 0.0001653135140877446,
						"mcc,none": 0.1883103190340171,
						"mcc_stderr,none": 0.0262812907905269
					},
					"hellaswag": {
						"acc,none": 0.5406293567018522,
						"acc_norm,none": 0.7261501692889862,
						"acc_norm_stderr,none": 0.0044502148267071715,
						"acc_stderr,none": 0.004973280417705513,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6991073161265282,
						"acc_stderr,none": 0.025969269677272484,
						"alias": "lambada",
						"perplexity,none": 4.03043069663995,
						"perplexity_stderr,none": 0.40304461678042
					},
					"lambada_multilingual": {
						"acc,none": 0.5401125557927421,
						"acc_stderr,none": 0.08623041572761711,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.85535270175715,
						"perplexity_stderr,none": 8.2387272906129
					},
					"lambada_openai": {
						"acc,none": 0.7494663302930332,
						"acc_stderr,none": 0.006037000144073585,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2416893989842417,
						"perplexity_stderr,none": 0.0626538019794447
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4259654570153309,
						"acc_stderr,none": 0.006889191823711755,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.55119917872118,
						"perplexity_stderr,none": 1.918254529669063
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7494663302930332,
						"acc_stderr,none": 0.006037000144073589,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2431582815310613,
						"perplexity_stderr,none": 0.06269625852204055
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45565689889384825,
						"acc_stderr,none": 0.00693852902647945,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.54469592209054,
						"perplexity_stderr,none": 1.4078402528286282
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5521055695711237,
						"acc_stderr,none": 0.00692804927623977,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.306348190280723,
						"perplexity_stderr,none": 0.7935504199235808
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5173685231903745,
						"acc_stderr,none": 0.006961773596960153,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.631361936162236,
						"perplexity_stderr,none": 1.149683274497286
					},
					"lambada_standard": {
						"acc,none": 0.6487483019600233,
						"acc_stderr,none": 0.006650578225573468,
						"alias": " - lambada_standard",
						"perplexity,none": 4.817872812584175,
						"perplexity_stderr,none": 0.1043981068206415
					},
					"logiqa": {
						"acc,none": 0.2411674347158218,
						"acc_norm,none": 0.28110599078341014,
						"acc_norm_stderr,none": 0.017632374626460005,
						"acc_stderr,none": 0.016779369344911064,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.36597350804728673,
						"acc_stderr,none": 0.07954926761608928,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.15,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.37777777777777777,
						"acc_stderr,none": 0.04188307537595853,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.35526315789473684,
						"acc_stderr,none": 0.03894734487013317,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.029890609686286637,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3958333333333333,
						"acc_stderr,none": 0.04089465449325583,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542126,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.034765996075164785,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.043364327079931785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4085106382978723,
						"acc_stderr,none": 0.03213418026701576,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3103448275862069,
						"acc_stderr,none": 0.03855289616378949,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.022930973071633356,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147127,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.432258064516129,
						"acc_stderr,none": 0.028181739720019416,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.48484848484848486,
						"acc_stderr,none": 0.03902551007374448,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.43434343434343436,
						"acc_stderr,none": 0.03531505879359183,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.49740932642487046,
						"acc_stderr,none": 0.03608390745384488,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.32051282051282054,
						"acc_stderr,none": 0.023661296393964273,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26296296296296295,
						"acc_stderr,none": 0.02684205787383371,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31512605042016806,
						"acc_stderr,none": 0.030176808288974337,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2847682119205298,
						"acc_stderr,none": 0.03684881521389024,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.44036697247706424,
						"acc_stderr,none": 0.021284310623761543,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093933,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.03492406104163613,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5232067510548524,
						"acc_stderr,none": 0.032512152011410174,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.43946188340807174,
						"acc_stderr,none": 0.03331092511038179,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.40458015267175573,
						"acc_stderr,none": 0.043046937953806645,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3523910733262487,
						"acc_stderr,none": 0.0767171024538961,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.371900826446281,
						"acc_stderr,none": 0.04412015806624504,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.046166311118017125,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3496932515337423,
						"acc_stderr,none": 0.03746668325470021,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.46601941747572817,
						"acc_stderr,none": 0.04939291447273482,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5641025641025641,
						"acc_stderr,none": 0.032485775115784016,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5504469987228607,
						"acc_stderr,none": 0.017788725283507337,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3439306358381503,
						"acc_stderr,none": 0.025574123786546665,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22569832402234638,
						"acc_stderr,none": 0.013981395058455047,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.39215686274509803,
						"acc_stderr,none": 0.027956046165424516,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4235597038944319,
						"acc_stderr,none": 0.07911482455386332,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4790996784565916,
						"acc_stderr,none": 0.028373270961069414,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.43209876543209874,
						"acc_stderr,none": 0.02756301097160668,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2695035460992908,
						"acc_stderr,none": 0.026469036818590634,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3161668839634941,
						"acc_stderr,none": 0.011875780894386583,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3492647058823529,
						"acc_stderr,none": 0.02895975519682487,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3660130718954248,
						"acc_stderr,none": 0.01948802574552966,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2571428571428571,
						"acc_stderr,none": 0.02797982353874455,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.38446538836529087,
						"acc_stderr,none": 0.0663097696603997,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5223880597014925,
						"acc_stderr,none": 0.035319879302087305,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3114494132572153,
						"acc_stderr,none": 0.07202961117104478,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5263157894736842,
						"acc_stderr,none": 0.03829509868994727,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7709628120224147,
						"acc_stderr,none": 0.004241765833887198,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7716639544344996,
						"acc_stderr,none": 0.0042335285876935755,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7156862745098039,
						"acc_stderr,none": 0.022359549679883527,
						"alias": " - mrpc",
						"f1,none": 0.8242424242424242,
						"f1_stderr,none": 0.016038267397030438
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.014404432132963989,
						"exact_match_stderr,remove_whitespace": 0.001983371191259785
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.02059164957122493,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4395,
						"acc_stderr,none": 0.011100968009384218,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3685,
						"acc_stderr,none": 0.010789443036622489,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4135,
						"acc_stderr,none": 0.011014514731361837,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5445,
						"acc_stderr,none": 0.011138757154883975,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.01115079235234166,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4820714285714286,
						"acc_stderr,none": 0.05687583484413734,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7731229597388466,
						"acc_norm,none": 0.7829162132752993,
						"acc_norm_stderr,none": 0.00961870841575678,
						"acc_stderr,none": 0.009771584259215161,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7328516146319968,
						"acc_norm,none": 0.6243976105740026,
						"acc_norm_stderr,none": 0.009437165654337793,
						"acc_stderr,none": 0.1539036691531295,
						"alias": "pythia",
						"bits_per_byte,none": 0.6352470906555451,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5532037447253297,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2416893989842417,
						"perplexity_stderr,none": 0.0626538019794447,
						"word_perplexity,none": 10.533788265986635,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6816720257234726,
						"acc_stderr,none": 0.0023167471587654275,
						"alias": " - qqp",
						"f1,none": 0.6875455207574654,
						"f1_stderr,none": 0.002613486083661407
					},
					"record": {
						"alias": "record",
						"em,none": 0.273,
						"em_stderr,none": 0.004455231184628846,
						"f1,none": 0.28287190500497816,
						"f1_stderr,none": 0.00446527004067906
					},
					"rte": {
						"acc,none": 0.7256317689530686,
						"acc_stderr,none": 0.026857804902852223,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.947,
						"acc_norm,none": 0.929,
						"acc_norm_stderr,none": 0.008125578442487917,
						"acc_stderr,none": 0.00708810561724645,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8967889908256881,
						"acc_stderr,none": 0.010308585297584697,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3293439827869272,
						"acc_stderr,none": 0.001569751494002335,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -6.412410288083058,
						"bleu_diff_stderr,none": 0.8722384847438415,
						"bleu_max,none": 26.930015329130203,
						"bleu_max_stderr,none": 0.8085015917893001,
						"rouge1_acc,none": 0.2864137086903305,
						"rouge1_acc_stderr,none": 0.015826142439502353,
						"rouge1_diff,none": -8.522217598446215,
						"rouge1_diff_stderr,none": 0.9253507541541494,
						"rouge1_max,none": 52.23106959677384,
						"rouge1_max_stderr,none": 0.8725723538509482,
						"rouge2_acc,none": 0.26438188494492043,
						"rouge2_acc_stderr,none": 0.015438211119522495,
						"rouge2_diff,none": -10.04732723165974,
						"rouge2_diff_stderr,none": 1.1316617230538066,
						"rouge2_max,none": 36.33254780289301,
						"rouge2_max_stderr,none": 1.0256135646393412,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -8.707625480067987,
						"rougeL_diff_stderr,none": 0.9450801901077751,
						"rougeL_max,none": 49.387256832280386,
						"rougeL_max_stderr,none": 0.890587396141587
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -6.412410288083058,
						"bleu_diff_stderr,none": 0.8722384847438415,
						"bleu_max,none": 26.930015329130203,
						"bleu_max_stderr,none": 0.8085015917893001,
						"rouge1_acc,none": 0.2864137086903305,
						"rouge1_acc_stderr,none": 0.015826142439502353,
						"rouge1_diff,none": -8.522217598446215,
						"rouge1_diff_stderr,none": 0.9253507541541494,
						"rouge1_max,none": 52.23106959677384,
						"rouge1_max_stderr,none": 0.8725723538509482,
						"rouge2_acc,none": 0.26438188494492043,
						"rouge2_acc_stderr,none": 0.015438211119522495,
						"rouge2_diff,none": -10.04732723165974,
						"rouge2_diff_stderr,none": 1.1316617230538066,
						"rouge2_max,none": 36.33254780289301,
						"rouge2_max_stderr,none": 1.0256135646393412,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -8.707625480067987,
						"rougeL_diff_stderr,none": 0.9450801901077751,
						"rougeL_max,none": 49.387256832280386,
						"rougeL_max_stderr,none": 0.890587396141587
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2558139534883721,
						"acc_stderr,none": 0.015274176219283361,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40287401208548235,
						"acc_stderr,none": 0.014194390862823389,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6352470906555451,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5532037447253297,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.533788265986635,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7277032359905288,
						"acc_stderr,none": 0.012510697991453937,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.04834688952654018,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6247272727272727,
						"acc_stderr,none": 0.06961187297007985,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.021854684955611256,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914163,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750324,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.019486596801643382,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.02200091089387719,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.02158898256835354,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345396,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4380722891566265,
						"acc_stderr,none": 0.05216231318679556,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512708,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.010020508033762624,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.010020508033762624,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38433734939759034,
						"acc_stderr,none": 0.009750238765722532,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5325301204819277,
						"acc_stderr,none": 0.010000839483876006,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.01002134544404757,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438302,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432365,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542304,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.009807915070677296,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432363,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44096385542168676,
						"acc_stderr,none": 0.009951968490273204,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40763052208835343,
						"acc_stderr,none": 0.009849569202733721,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41526104417670684,
						"acc_stderr,none": 0.009877093420328583,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3473895582329317,
						"acc_stderr,none": 0.0095438354093349,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6282413813849949,
						"acc_stderr,none": 0.06292319963000094,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040303,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7816015883520847,
						"acc_stderr,none": 0.010632343054700497,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71475843812045,
						"acc_stderr,none": 0.011619771152072333,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5618795499669094,
						"acc_stderr,none": 0.012768206616277762,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.01260923817555117,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6657842488418266,
						"acc_stderr,none": 0.012139246810918223,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5393778954334878,
						"acc_stderr,none": 0.012827159238891913,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6836532097948379,
						"acc_stderr,none": 0.011967713146973763,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730201,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.01265536903075035,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.628060886829914,
						"acc_stderr,none": 0.01243793623520202,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03836759408197828,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907548,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7518248175182481,
						"acc_stderr,none": 0.013955800392484941,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7946768060836502,
						"acc_stderr,none": 0.024955347906737913,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7698412698412699,
						"acc_stderr,none": 0.01876853300590487,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-30_pth"
	},
	"./rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-40_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6195039458850057,
						"acc_norm,none": 0.6200676437429538,
						"acc_norm_stderr,none": 0.08492895868030152,
						"acc_stderr,none": 0.10456786902151682,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4459375,
						"acc_stderr,none": 0.04931426218124011,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1344,
						"acc_stderr,none": 0.10362345847620769,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8161492537313433,
						"acc_stderr,none": 0.1677166847224475,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29049034175334326,
						"acc_norm,none": 0.29049034175334326,
						"acc_norm_stderr,none": 0.1282424367047985,
						"acc_stderr,none": 0.1282424367047985,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3078052149887758,
						"acc_norm,none": 0.3078052149887758,
						"acc_norm_stderr,none": 0.0541021857244165,
						"acc_stderr,none": 0.0541021857244165,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4549418604651163,
						"likelihood_diff_stderr,none": 0.522085518739553,
						"pct_stereotype,none": 0.6262671437090042,
						"pct_stereotype_stderr,none": 0.06668513604990244
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06988188976377953,
						"exact_match_stderr,none": 0.005657133257635411
					},
					"glue": {
						"acc,none": 0.6949299649350421,
						"acc_stderr,none": 0.05567191094265838,
						"alias": "glue",
						"f1,none": 0.692586478317217,
						"f1_stderr,none": 0.00016762713714190596,
						"mcc,none": 0.18075435456524627,
						"mcc_stderr,none": 0.0005548256504666373
					},
					"kmmlu": {
						"acc,none": 0.2301472711521802,
						"acc_norm,none": 0.2301472711521802,
						"acc_norm_stderr,none": 0.02580232986416977,
						"acc_stderr,none": 0.02580232986416977,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5365051523788643,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502986,
						"acc_stderr,none": 0.0445137395986217,
						"alias": "kobest",
						"f1,none": 0.45937334179843003,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6985251309916554,
						"acc_stderr,none": 0.025782152988454606,
						"alias": "lambada",
						"perplexity,none": 4.04606534626777,
						"perplexity_stderr,none": 0.4020069035786625
					},
					"lambada_cloze": {
						"acc,none": 0.021637880846109063,
						"acc_stderr,none": 0.0077858189399822005,
						"alias": "lambada_cloze",
						"perplexity,none": 1171.9566712676258,
						"perplexity_stderr,none": 422.1475942547852
					},
					"lambada_multilingual": {
						"acc,none": 0.5383271880457986,
						"acc_stderr,none": 0.08681475823526331,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.927449833314707,
						"perplexity_stderr,none": 8.24091373962889
					},
					"mmlu": {
						"acc,none": 0.3681099558467455,
						"acc_stderr,none": 0.07919491400008644,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.35557917109458026,
						"acc_stderr,none": 0.07327843003509107,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4193756034760219,
						"acc_stderr,none": 0.08151257562767407,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3909652258693533,
						"acc_stderr,none": 0.06995180529186491,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3139866793529972,
						"acc_stderr,none": 0.0718850203775544,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.363236337828247,
						"acc_norm,none": 0.33207782005896697,
						"acc_norm_stderr,none": 0.000101453693644316,
						"acc_stderr,none": 0.08674200919923794,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4799285714285714,
						"acc_stderr,none": 0.056738546899646254,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.733359115336392,
						"acc_norm,none": 0.6243752672010469,
						"acc_norm_stderr,none": 0.00926095035495321,
						"acc_stderr,none": 0.15745562639614497,
						"alias": "pythia",
						"bits_per_byte,none": 0.63504530762049,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5529865205440583,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.26144280861378,
						"perplexity_stderr,none": 0.06329504499740238,
						"word_perplexity,none": 10.525912748850747,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4450354609929078,
						"acc_norm_stderr,none": 0.06457916699277962,
						"acc_stderr,none": 0.05181708791288234,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6480982330038934,
						"acc_stderr,none": 0.06013900935873594,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32801648606823786,
						"acc_stderr,none": 0.0016111397392200954,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -6.449876019468411,
						"bleu_diff_stderr,none": 0.8648441827511708,
						"bleu_max,none": 26.725615443705127,
						"bleu_max_stderr,none": 0.7972052706762408,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -8.362910310533742,
						"rouge1_diff_stderr,none": 0.9393731907326451,
						"rouge1_max,none": 52.12546817223328,
						"rouge1_max_stderr,none": 0.8616366398506576,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237009,
						"rouge2_diff,none": -9.90803343683518,
						"rouge2_diff_stderr,none": 1.1441643043034864,
						"rouge2_max,none": 36.32378129064672,
						"rouge2_max_stderr,none": 1.0106546403584376,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -8.488140323054756,
						"rougeL_diff_stderr,none": 0.9590589495564945,
						"rougeL_max,none": 49.359296251685706,
						"rougeL_max_stderr,none": 0.8794222392570263
					},
					"xcopa": {
						"acc,none": 0.6221818181818181,
						"acc_stderr,none": 0.06802681894210574,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4387951807228916,
						"acc_stderr,none": 0.05134050124375675,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6281210516816076,
						"acc_stderr,none": 0.06241507928658257,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03902273081363968,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6195039458850057,
						"acc_norm,none": 0.6200676437429538,
						"acc_norm_stderr,none": 0.08492895868030152,
						"acc_stderr,none": 0.10456786902151682,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4459375,
						"acc_stderr,none": 0.04931426218124011,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.01575221038877184,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.01560511196754194,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3858333333333333,
						"acc_stderr,none": 0.014058319261878734,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3984641638225256,
						"acc_norm,none": 0.44112627986348124,
						"acc_norm_stderr,none": 0.014509747749064664,
						"acc_stderr,none": 0.014306946052735563,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7285353535353535,
						"acc_norm,none": 0.7083333333333334,
						"acc_norm_stderr,none": 0.009326752065621162,
						"acc_stderr,none": 0.009125362970360621,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1344,
						"acc_stderr,none": 0.10362345847620769,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0305,
						"acc_stderr,none": 0.003846072169833601,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.01114906502023433,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1545,
						"acc_stderr,none": 0.008083783073189469,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.109,
						"acc_stderr,none": 0.0069702074499936395,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.12,
						"acc_stderr,none": 0.007268178121551632,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.123,
						"acc_stderr,none": 0.007345915956544896,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.041,
						"acc_stderr,none": 0.004435012363831025,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1785,
						"acc_stderr,none": 0.0085647856234927,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0395,
						"acc_stderr,none": 0.004356531267228614,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.088,
						"acc_stderr,none": 0.006336250787099515,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0013015184381778742,
						"acc_stderr,none": 0.0007511058074590394,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8161492537313433,
						"acc_stderr,none": 0.1677166847224475,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523717,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910624,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397321,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024063,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598256,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.01543172505386661,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024952,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045035,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747493,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337075,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406139,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151098,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306504,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695803,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.01429714686251791,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.01472167543888022,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377944,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823328,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656805,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.015594460144140603,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597491,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.015658997547870243,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296184,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037181,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244052,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118592,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706618,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0157049879543618,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.015431725053866608,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.01578268332993763,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.645,
						"acc_stderr,none": 0.015139491543780534,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.537,
						"acc_stderr,none": 0.01577592722726242,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345686,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996695,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286414,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036264,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752504,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.01199449323097344,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.0068297617561409105,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343972,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030036,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087962,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.01578268332993762,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785138,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315153,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.01541731797991108,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405182,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409293,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897874,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689109,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306538,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.01475865230357488,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7018348623853211,
						"acc_stderr,none": 0.008000892584151419,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.7126148705096074,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29049034175334326,
						"acc_norm,none": 0.29049034175334326,
						"acc_norm_stderr,none": 0.1282424367047985,
						"acc_stderr,none": 0.1282424367047985,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2127659574468085,
						"acc_norm,none": 0.2127659574468085,
						"acc_norm_stderr,none": 0.06034260964773521,
						"acc_stderr,none": 0.06034260964773521,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629555,
						"acc_stderr,none": 0.10497277621629555,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518174,
						"acc_stderr,none": 0.05443310539518174,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.07956541321016082,
						"acc_stderr,none": 0.07956541321016082,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4772727272727273,
						"acc_norm,none": 0.4772727272727273,
						"acc_norm_stderr,none": 0.07617047451458002,
						"acc_stderr,none": 0.07617047451458002,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3078052149887758,
						"acc_norm,none": 0.3078052149887758,
						"acc_norm_stderr,none": 0.0541021857244165,
						"acc_stderr,none": 0.0541021857244165,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.03912553875691512,
						"acc_stderr,none": 0.03912553875691512,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3588516746411483,
						"acc_norm,none": 0.3588516746411483,
						"acc_norm_stderr,none": 0.033258685263024994,
						"acc_stderr,none": 0.033258685263024994,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3053435114503817,
						"acc_norm,none": 0.3053435114503817,
						"acc_norm_stderr,none": 0.04039314978724561,
						"acc_stderr,none": 0.04039314978724561,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449114,
						"acc_stderr,none": 0.04648144634449114,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3188854489164087,
						"acc_norm,none": 0.3188854489164087,
						"acc_norm_stderr,none": 0.025971647189191573,
						"acc_stderr,none": 0.025971647189191573,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832924,
						"acc_stderr,none": 0.031321798030832924,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.37988826815642457,
						"acc_norm,none": 0.37988826815642457,
						"acc_norm_stderr,none": 0.0363791806643084,
						"acc_stderr,none": 0.0363791806643084,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.27848101265822783,
						"acc_norm,none": 0.27848101265822783,
						"acc_norm_stderr,none": 0.029178682304842548,
						"acc_stderr,none": 0.029178682304842548,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.04773249298367361,
						"acc_stderr,none": 0.04773249298367361,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.30036630036630035,
						"acc_norm,none": 0.30036630036630035,
						"acc_norm_stderr,none": 0.027795629283121376,
						"acc_stderr,none": 0.027795629283121376,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617515,
						"acc_stderr,none": 0.03814280082617515,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.03959440284735793,
						"acc_stderr,none": 0.03959440284735793,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03750293003086744,
						"acc_stderr,none": 0.03750293003086744,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.03642914578292404,
						"acc_stderr,none": 0.03642914578292404,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.034078261673374376,
						"acc_stderr,none": 0.034078261673374376,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.02874673063268137,
						"acc_stderr,none": 0.02874673063268137,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.031544498882702866,
						"acc_stderr,none": 0.031544498882702866,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23043478260869565,
						"acc_norm,none": 0.23043478260869565,
						"acc_norm_stderr,none": 0.027827807522276156,
						"acc_stderr,none": 0.027827807522276156,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.03972552884785138,
						"acc_stderr,none": 0.03972552884785138,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.0389707788151041,
						"acc_stderr,none": 0.0389707788151041,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.32954545454545453,
						"acc_norm,none": 0.32954545454545453,
						"acc_norm_stderr,none": 0.035532299023675745,
						"acc_stderr,none": 0.035532299023675745,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3288590604026846,
						"acc_norm,none": 0.3288590604026846,
						"acc_norm_stderr,none": 0.03861721178313762,
						"acc_stderr,none": 0.03861721178313762,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03744254928577061,
						"acc_stderr,none": 0.03744254928577061,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261343,
						"acc_stderr,none": 0.04319782230261343,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.29878048780487804,
						"acc_norm,none": 0.29878048780487804,
						"acc_norm_stderr,none": 0.03585166336909662,
						"acc_stderr,none": 0.03585166336909662,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.038970778815104114,
						"acc_stderr,none": 0.038970778815104114,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235173,
						"acc_stderr,none": 0.03970158273235173,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.03644669348694787,
						"acc_stderr,none": 0.03644669348694787,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2944038929440389,
						"acc_norm,none": 0.2944038929440389,
						"acc_norm_stderr,none": 0.022509089804193687,
						"acc_stderr,none": 0.022509089804193687,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.397196261682243,
						"acc_norm,none": 0.397196261682243,
						"acc_norm_stderr,none": 0.033527466939507825,
						"acc_stderr,none": 0.033527466939507825,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962466,
						"acc_stderr,none": 0.03224133248962466,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.03476890096393038,
						"acc_stderr,none": 0.03476890096393038,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.04207160755584018,
						"acc_stderr,none": 0.04207160755584018,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3310344827586207,
						"acc_norm,none": 0.3310344827586207,
						"acc_norm_stderr,none": 0.039215453124671215,
						"acc_stderr,none": 0.039215453124671215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.033822819375172945,
						"acc_stderr,none": 0.033822819375172945,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780323,
						"acc_stderr,none": 0.030808291124780323,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26861702127659576,
						"acc_norm,none": 0.26861702127659576,
						"acc_norm_stderr,none": 0.022888827968077056,
						"acc_stderr,none": 0.022888827968077056,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.36637931034482757,
						"acc_norm,none": 0.36637931034482757,
						"acc_norm_stderr,none": 0.03170108710059699,
						"acc_stderr,none": 0.03170108710059699,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.03653923615465969,
						"acc_stderr,none": 0.03653923615465969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326115,
						"acc_stderr,none": 0.031061820840326115,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3081081081081081,
						"acc_norm,none": 0.3081081081081081,
						"acc_norm_stderr,none": 0.034037822778343836,
						"acc_stderr,none": 0.034037822778343836,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32919254658385094,
						"acc_norm,none": 0.32919254658385094,
						"acc_norm_stderr,none": 0.03715043857896316,
						"acc_stderr,none": 0.03715043857896316,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.18075435456524627,
						"mcc_stderr,none": 0.023554737325358507
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4549418604651163,
						"likelihood_diff_stderr,none": 0.522085518739553,
						"pct_stereotype,none": 0.6262671437090042,
						"pct_stereotype_stderr,none": 0.06668513604990244
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.69268038163387,
						"likelihood_diff_stderr,none": 0.08667342481316458,
						"pct_stereotype,none": 0.6547406082289803,
						"pct_stereotype_stderr,none": 0.011613694085569929
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.065934065934066,
						"likelihood_diff_stderr,none": 0.38925559738842147,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.829545454545454,
						"likelihood_diff_stderr,none": 1.8269037958630563,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.911538461538462,
						"likelihood_diff_stderr,none": 0.6100724748390253,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.588671875,
						"likelihood_diff_stderr,none": 0.1588748817836969,
						"pct_stereotype,none": 0.621875,
						"pct_stereotype_stderr,none": 0.027150254412347148
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.587384259259259,
						"likelihood_diff_stderr,none": 0.2391792034594464,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.03344887382997866
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.9288194444444446,
						"likelihood_diff_stderr,none": 0.3013648610412603,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5423228346456694,
						"likelihood_diff_stderr,none": 0.15093521465577134,
						"pct_stereotype,none": 0.5767716535433071,
						"pct_stereotype_stderr,none": 0.021942461659821902
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5585585585585586,
						"likelihood_diff_stderr,none": 0.3437416510035903,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.042343213610845386
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.169354838709677,
						"likelihood_diff_stderr,none": 0.4383054812205372,
						"pct_stereotype,none": 0.9247311827956989,
						"pct_stereotype_stderr,none": 0.027505616493839195
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.275,
						"likelihood_diff_stderr,none": 0.2514182827127048,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333337
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.21925313059034,
						"likelihood_diff_stderr,none": 0.07423227491325449,
						"pct_stereotype,none": 0.5974955277280859,
						"pct_stereotype_stderr,none": 0.011978865190858288
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.2430555555555554,
						"likelihood_diff_stderr,none": 0.29773883282930963,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.051929078688949845
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.6346153846153846,
						"likelihood_diff_stderr,none": 0.6684363838175218,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.890151515151516,
						"likelihood_diff_stderr,none": 0.4197252121639488,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.774532710280374,
						"likelihood_diff_stderr,none": 0.13606287272427092,
						"pct_stereotype,none": 0.6137071651090342,
						"pct_stereotype_stderr,none": 0.027218484103343366
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.383399209486166,
						"likelihood_diff_stderr,none": 0.19179220050758536,
						"pct_stereotype,none": 0.4624505928853755,
						"pct_stereotype_stderr,none": 0.03140809482817245
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.579861111111111,
						"likelihood_diff_stderr,none": 0.43739033917656484,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.813858695652174,
						"likelihood_diff_stderr,none": 0.13783369740955786,
						"pct_stereotype,none": 0.5065217391304347,
						"pct_stereotype_stderr,none": 0.023336016041798573
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4489130434782607,
						"likelihood_diff_stderr,none": 0.2590877602583149,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.532967032967033,
						"likelihood_diff_stderr,none": 0.3168355244611528,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7075892857142856,
						"likelihood_diff_stderr,none": 0.26084039637415307,
						"pct_stereotype,none": 0.7040816326530612,
						"pct_stereotype_stderr,none": 0.032687383845057996
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06988188976377953,
						"exact_match_stderr,none": 0.005657133257635411
					},
					"glue": {
						"acc,none": 0.6949299649350421,
						"acc_stderr,none": 0.05567191094265838,
						"alias": "glue",
						"f1,none": 0.692586478317217,
						"f1_stderr,none": 0.00016762713714190596,
						"mcc,none": 0.18075435456524627,
						"mcc_stderr,none": 0.0005548256504666373
					},
					"hellaswag": {
						"acc,none": 0.5413264289982075,
						"acc_norm,none": 0.7262497510456084,
						"acc_norm_stderr,none": 0.004449710700861743,
						"acc_stderr,none": 0.004972708369656542,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2301472711521802,
						"acc_norm,none": 0.2301472711521802,
						"acc_norm_stderr,none": 0.02580232986416977,
						"acc_stderr,none": 0.02580232986416977,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661757,
						"acc_stderr,none": 0.013106173040661757,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763912,
						"acc_stderr,none": 0.013253174964763912,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967225,
						"acc_stderr,none": 0.013473586661967225,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.017808806510137862,
						"acc_stderr,none": 0.017808806510137862,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.198,
						"acc_norm,none": 0.198,
						"acc_norm_stderr,none": 0.01260773393417529,
						"acc_stderr,none": 0.01260773393417529,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462623,
						"acc_stderr,none": 0.014078856992462623,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274519,
						"acc_stderr,none": 0.012886662332274519,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.0326374172542057,
						"acc_stderr,none": 0.0326374172542057,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.013314551335935941,
						"acc_stderr,none": 0.013314551335935941,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.037095601705416294,
						"acc_stderr,none": 0.037095601705416294,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.045126085985421255,
						"acc_stderr,none": 0.045126085985421255,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.188,
						"acc_norm,none": 0.188,
						"acc_norm_stderr,none": 0.012361586015103763,
						"acc_stderr,none": 0.012361586015103763,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281564,
						"acc_stderr,none": 0.013354937452281564,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.207,
						"acc_norm,none": 0.207,
						"acc_norm_stderr,none": 0.01281855355784396,
						"acc_stderr,none": 0.01281855355784396,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.01253523562331932,
						"acc_stderr,none": 0.01253523562331932,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177455,
						"acc_stderr,none": 0.013569640199177455,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920831,
						"acc_stderr,none": 0.013512312258920831,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920828,
						"acc_stderr,none": 0.013512312258920828,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938043,
						"acc_stderr,none": 0.012931481864938043,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936435,
						"acc_stderr,none": 0.013334797216936435,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.213,
						"acc_norm,none": 0.213,
						"acc_norm_stderr,none": 0.012953717566737228,
						"acc_stderr,none": 0.012953717566737228,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.013232501619085341,
						"acc_stderr,none": 0.013232501619085341,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.201,
						"acc_norm,none": 0.201,
						"acc_norm_stderr,none": 0.012679107214617322,
						"acc_stderr,none": 0.012679107214617322,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.01323250161908534,
						"acc_stderr,none": 0.01323250161908534,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.017061951343248973,
						"acc_stderr,none": 0.017061951343248973,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021355,
						"acc_stderr,none": 0.013912208651021355,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804485,
						"acc_stderr,none": 0.013273740700804485,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.012772554096113116,
						"acc_stderr,none": 0.012772554096113116,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.01353152253451545,
						"acc_stderr,none": 0.01353152253451545,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.025574048533225636,
						"acc_stderr,none": 0.025574048533225636,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024391,
						"acc_stderr,none": 0.012749374359024391,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555958,
						"acc_stderr,none": 0.013550631705555958,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938019,
						"acc_stderr,none": 0.012931481864938019,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689504,
						"acc_stderr,none": 0.01382541652689504,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.029601626330440604,
						"acc_stderr,none": 0.029601626330440604,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514516,
						"acc_stderr,none": 0.01397996564514516,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5365051523788643,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502986,
						"acc_stderr,none": 0.0445137395986217,
						"alias": "kobest",
						"f1,none": 0.45937334179843003,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5484330484330484,
						"acc_stderr,none": 0.013285993885047974,
						"alias": " - kobest_boolq",
						"f1,none": 0.4600628900297951,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": " - kobest_copa",
						"f1,none": 0.6243053405747226,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.436,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.02222133153414301,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4322213204597591,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.025004412942296047,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4786012487802015,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3324228712464007,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6985251309916554,
						"acc_stderr,none": 0.025782152988454606,
						"alias": "lambada",
						"perplexity,none": 4.04606534626777,
						"perplexity_stderr,none": 0.4020069035786625
					},
					"lambada_cloze": {
						"acc,none": 0.021637880846109063,
						"acc_stderr,none": 0.0077858189399822005,
						"alias": "lambada_cloze",
						"perplexity,none": 1171.9566712676258,
						"perplexity_stderr,none": 422.1475942547852
					},
					"lambada_multilingual": {
						"acc,none": 0.5383271880457986,
						"acc_stderr,none": 0.08681475823526331,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.927449833314707,
						"perplexity_stderr,none": 8.24091373962889
					},
					"lambada_openai": {
						"acc,none": 0.7488841451581603,
						"acc_stderr,none": 0.006041662455556337,
						"alias": " - lambada_openai",
						"perplexity,none": 3.26144280861378,
						"perplexity_stderr,none": 0.06329504499740238
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.036677663496992044,
						"acc_stderr,none": 0.0026187782113318184,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 331.8441783583291,
						"perplexity_stderr,none": 10.496540047120147
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42227828449446925,
						"acc_stderr,none": 0.006881304773376885,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.56538287780979,
						"perplexity_stderr,none": 1.916593357868694
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7490782068697845,
						"acc_stderr,none": 0.00604010996180077,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.260807935628407,
						"perplexity_stderr,none": 0.06316399474808944
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45546283718222397,
						"acc_stderr,none": 0.006938287769723249,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.634878372512766,
						"perplexity_stderr,none": 1.4125806311180917
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5499708907432563,
						"acc_stderr,none": 0.006931101003281444,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.385887514294964,
						"perplexity_stderr,none": 0.7972646275148493
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5148457209392587,
						"acc_stderr,none": 0.006962906440875389,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.79029246632759,
						"perplexity_stderr,none": 1.1588891100985936
					},
					"lambada_standard": {
						"acc,none": 0.648554240248399,
						"acc_stderr,none": 0.006651420096937584,
						"alias": " - lambada_standard",
						"perplexity,none": 4.831203854071099,
						"perplexity_stderr,none": 0.10479154814359125
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.006598098195226082,
						"acc_stderr,none": 0.0011279346898141831,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 2012.0691641769224,
						"perplexity_stderr,none": 58.125713068011784
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0177192477984583,
						"acc_stderr,none": 0.01674276693510142,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24618320610687022,
						"acc_norm,none": 0.2830788804071247,
						"acc_norm_stderr,none": 0.011365834536036195,
						"acc_stderr,none": 0.010868610457495216,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24958123953098826,
						"acc_norm,none": 0.2606365159128978,
						"acc_norm_stderr,none": 0.008036134931668057,
						"acc_stderr,none": 0.007922429819042542,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.403622113958907,
						"acc_stderr,none": 0.005049393808058099,
						"alias": "mc_taco",
						"f1,none": 0.5239665229520669,
						"f1_stderr,none": 0.005596591615660174
					},
					"medmcqa": {
						"acc,none": 0.3351661486971073,
						"acc_norm,none": 0.3351661486971073,
						"acc_norm_stderr,none": 0.007299525371661003,
						"acc_stderr,none": 0.007299525371661003,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.32285938727415553,
						"acc_norm,none": 0.32285938727415553,
						"acc_norm_stderr,none": 0.013109996765488855,
						"acc_stderr,none": 0.013109996765488855,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3681099558467455,
						"acc_stderr,none": 0.07919491400008644,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.03775251680686371,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.041716541613545426,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.34868421052631576,
						"acc_stderr,none": 0.03878139888797611,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3660377358490566,
						"acc_stderr,none": 0.029647813539365242,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.04076663253918567,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.034564257450869995,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808777,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03202563076101735,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481404,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2830687830687831,
						"acc_stderr,none": 0.023201392938194978,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.04104947269903394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45483870967741935,
						"acc_stderr,none": 0.028327743091561067,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2512315270935961,
						"acc_stderr,none": 0.030516530732694436,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.49696969696969695,
						"acc_stderr,none": 0.03904272341431856,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4696969696969697,
						"acc_stderr,none": 0.03555804051763929,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5077720207253886,
						"acc_stderr,none": 0.036080032255696524,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3230769230769231,
						"acc_stderr,none": 0.02371088850197057,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.027195934804085622,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.029597329730978082,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.03603038545360384,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.44954128440366975,
						"acc_stderr,none": 0.021327881417823363,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19907407407407407,
						"acc_stderr,none": 0.02723229846269024,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46078431372549017,
						"acc_stderr,none": 0.03498501649369527,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5021097046413502,
						"acc_stderr,none": 0.032546938018020076,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.43946188340807174,
						"acc_stderr,none": 0.03331092511038179,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4351145038167939,
						"acc_stderr,none": 0.04348208051644858,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.35557917109458026,
						"acc_stderr,none": 0.07327843003509107,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.34710743801652894,
						"acc_stderr,none": 0.043457245702925335,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.046166311118017125,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4563106796116505,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.03255326307272487,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5491698595146871,
						"acc_stderr,none": 0.017793297572699044,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3265895953757225,
						"acc_stderr,none": 0.025248264774242832,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.01424263007057489,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.39869281045751637,
						"acc_stderr,none": 0.028036092273891772,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4193756034760219,
						"acc_stderr,none": 0.08151257562767407,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.48231511254019294,
						"acc_stderr,none": 0.02838032284907713,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.027513747284379428,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.32333767926988266,
						"acc_stderr,none": 0.011946565758447214,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.36764705882352944,
						"acc_stderr,none": 0.029289413409403192,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.369281045751634,
						"acc_stderr,none": 0.01952431674486635,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661895,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2693877551020408,
						"acc_stderr,none": 0.02840125202902294,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3909652258693533,
						"acc_stderr,none": 0.06995180529186491,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5323383084577115,
						"acc_stderr,none": 0.035281314729336065,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3139866793529972,
						"acc_stderr,none": 0.0718850203775544,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.03696584317010601,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.52046783625731,
						"acc_stderr,none": 0.0383161053282193,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7661742231278655,
						"acc_stderr,none": 0.004272547842263172,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7663751017087063,
						"acc_stderr,none": 0.004267577414181665,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7254901960784313,
						"acc_stderr,none": 0.022120630385010484,
						"alias": "mrpc",
						"f1,none": 0.8292682926829268,
						"f1_stderr,none": 0.015874705228313604
					},
					"multimedqa": {
						"acc,none": 0.363236337828247,
						"acc_norm,none": 0.33207782005896697,
						"acc_norm_stderr,none": 0.000101453693644316,
						"acc_stderr,none": 0.08674200919923794,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5187706270627063,
						"acc_stderr,none": 0.007176740499730073,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7081452236084045,
						"mrr_stderr,none": 0.01035688468231922,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4074492099322799,
						"r@2_stderr,none": 0.016516875508477045
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6598946593079557,
						"mrr_stderr,none": 0.010433839765490512,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.016757741478801033
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.022109039310618556,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4325,
						"acc_stderr,none": 0.01108076110352171,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3705,
						"acc_stderr,none": 0.010801537464907352,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4105,
						"acc_stderr,none": 0.01100251801640663,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.011145474902641254,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5335,
						"acc_stderr,none": 0.011158007239770807,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4799285714285714,
						"acc_stderr,none": 0.056738546899646254,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7693144722524483,
						"acc_norm,none": 0.7812840043525572,
						"acc_norm_stderr,none": 0.009644731932667558,
						"acc_stderr,none": 0.009828959550983079,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2503202391118702,
						"acc_norm,none": 0.2767933390264731,
						"acc_norm_stderr,none": 0.003268753546278482,
						"acc_stderr,none": 0.003164892964720327,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.02062956999834541,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.733359115336392,
						"acc_norm,none": 0.6243752672010469,
						"acc_norm_stderr,none": 0.00926095035495321,
						"acc_stderr,none": 0.15745562639614497,
						"alias": "pythia",
						"bits_per_byte,none": 0.63504530762049,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5529865205440583,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.26144280861378,
						"perplexity_stderr,none": 0.06329504499740238,
						"word_perplexity,none": 10.525912748850747,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4450354609929078,
						"acc_norm_stderr,none": 0.06457916699277962,
						"acc_stderr,none": 0.05181708791288234,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.48333333333333334,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.04531634835874828,
						"acc_stderr,none": 0.04580945392704764,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.0378261498181204,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.029138375022747656,
						"acc_stderr,none": 0.028857363751758302,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6872619342072718,
						"acc_stderr,none": 0.0023057117793409223,
						"alias": "qqp",
						"f1,none": 0.6909766350571903,
						"f1_stderr,none": 0.00261475204280303
					},
					"race": {
						"acc,none": 0.3521531100478469,
						"acc_stderr,none": 0.01478262989720226,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2722,
						"em_stderr,none": 0.004451145613179882,
						"f1,none": 0.28219857167601586,
						"f1_stderr,none": 0.0044615110959990364
					},
					"rte": {
						"acc,none": 0.7075812274368231,
						"acc_stderr,none": 0.02738017597257561,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.947,
						"acc_norm,none": 0.927,
						"acc_norm_stderr,none": 0.008230354715244073,
						"acc_stderr,none": 0.007088105617246438,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.703971119133574,
						"acc_stderr,none": 0.027478303862979354,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9059633027522935,
						"acc_stderr,none": 0.009889968523888345,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5828251524542637,
						"acc_norm,none": 0.770518844346696,
						"acc_norm_stderr,none": 0.0029730065675431993,
						"acc_stderr,none": 0.003486253177229567,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6480982330038934,
						"acc_stderr,none": 0.06013900935873594,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5967548076923077,
						"acc_stderr,none": 0.0049096668856467756,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8079456775108949,
						"acc_stderr,none": 0.0039658160713191,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5437254901960784,
						"acc_stderr,none": 0.004932012338899769,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32801648606823786,
						"acc_stderr,none": 0.0016111397392200954,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -6.449876019468411,
						"bleu_diff_stderr,none": 0.8648441827511708,
						"bleu_max,none": 26.725615443705127,
						"bleu_max_stderr,none": 0.7972052706762408,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -8.362910310533742,
						"rouge1_diff_stderr,none": 0.9393731907326451,
						"rouge1_max,none": 52.12546817223328,
						"rouge1_max_stderr,none": 0.8616366398506576,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237009,
						"rouge2_diff,none": -9.90803343683518,
						"rouge2_diff_stderr,none": 1.1441643043034864,
						"rouge2_max,none": 36.32378129064672,
						"rouge2_max_stderr,none": 1.0106546403584376,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -8.488140323054756,
						"rougeL_diff_stderr,none": 0.9590589495564945,
						"rougeL_max,none": 49.359296251685706,
						"rougeL_max_stderr,none": 0.8794222392570263
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -6.449876019468411,
						"bleu_diff_stderr,none": 0.8648441827511708,
						"bleu_max,none": 26.725615443705127,
						"bleu_max_stderr,none": 0.7972052706762408,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -8.362910310533742,
						"rouge1_diff_stderr,none": 0.9393731907326451,
						"rouge1_max,none": 52.12546817223328,
						"rouge1_max_stderr,none": 0.8616366398506576,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237009,
						"rouge2_diff,none": -9.90803343683518,
						"rouge2_diff_stderr,none": 1.1441643043034864,
						"rouge2_max,none": 36.32378129064672,
						"rouge2_max_stderr,none": 1.0106546403584376,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -8.488140323054756,
						"rougeL_diff_stderr,none": 0.9590589495564945,
						"rougeL_max,none": 49.359296251685706,
						"rougeL_max_stderr,none": 0.8794222392570263
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2533659730722154,
						"acc_stderr,none": 0.015225899340826845,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40266699906426034,
						"acc_stderr,none": 0.014235930398919306,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.07086614173228346,
						"exact_match_stderr,none": 0.005693817930305433
					},
					"wic": {
						"acc,none": 0.5297805642633229,
						"acc_stderr,none": 0.019775550529171206,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6350296568587683,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.552969673400715,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.525302151530585,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7213891081294396,
						"acc_stderr,none": 0.01259989664949388,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.05961305784972239,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.3942307692307692,
						"acc_stderr,none": 0.04815154775990711,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8827838827838828,
						"acc_stderr,none": 0.01950457139863538,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6221818181818181,
						"acc_stderr,none": 0.06802681894210574,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.021834685869369205,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.02234794983266809,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.02018670369357085,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740862,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.02158898256835354,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.02047511809298897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.020740596536488076,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4387951807228916,
						"acc_stderr,none": 0.05134050124375675,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4863453815261044,
						"acc_stderr,none": 0.010018334967148556,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.485140562248996,
						"acc_stderr,none": 0.01001764608425538,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.00976532683221899,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5305220883534136,
						"acc_stderr,none": 0.01000338235531474,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5076305220883535,
						"acc_stderr,none": 0.01002090573154232,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4959839357429719,
						"acc_stderr,none": 0.010021749574555898,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.009895812914052197,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4931726907630522,
						"acc_stderr,none": 0.010021138522919163,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39959839357429716,
						"acc_stderr,none": 0.009817939267958271,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40963855421686746,
						"acc_stderr,none": 0.009857049962123561,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4497991967871486,
						"acc_stderr,none": 0.009971431255560173,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384231,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42650602409638555,
						"acc_stderr,none": 0.009913215943570535,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568397,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6281210516816076,
						"acc_stderr,none": 0.06241507928658257,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389868,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7842488418266049,
						"acc_stderr,none": 0.01058558922710119,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499847,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.01277240869797914,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6035737921906023,
						"acc_stderr,none": 0.012588033568434754,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6618133686300464,
						"acc_stderr,none": 0.012174678796437402,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5446724023825281,
						"acc_stderr,none": 0.01281566654206729,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6823295830575777,
						"acc_stderr,none": 0.011981108837175406,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293373,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823772,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6320317670416943,
						"acc_stderr,none": 0.012410410404915704,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.03902273081363968,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8812903225806452,
						"acc_stderr,none": 0.006709412618684158,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7507820646506778,
						"acc_stderr,none": 0.013975386806002533,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944934,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7777777777777778,
						"acc_stderr,none": 0.01853691744855944,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-40_pth"
	},
	"./rwkv-x-dev/1_3-C0-PRERUN-rwkv-30_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6271138669673055,
						"acc_norm,none": 0.6220405862457723,
						"acc_norm_stderr,none": 0.08982265247507755,
						"acc_stderr,none": 0.10772226861176627,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4403125,
						"acc_stderr,none": 0.03961615124240837,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1209,
						"acc_stderr,none": 0.09780675540899796,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8122835820895521,
						"acc_stderr,none": 0.1661557766387033,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25928677563150077,
						"acc_norm,none": 0.25928677563150077,
						"acc_norm_stderr,none": 0.1155882587095293,
						"acc_stderr,none": 0.1155882587095293,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.28414781557589364,
						"acc_norm,none": 0.28414781557589364,
						"acc_norm_stderr,none": 0.04690889751458147,
						"acc_stderr,none": 0.04690889751458147,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.438534212880143,
						"likelihood_diff_stderr,none": 0.5186654021909625,
						"pct_stereotype,none": 0.6197078115682767,
						"pct_stereotype_stderr,none": 0.06456604537313038
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0625,
						"exact_match_stderr,none": 0.005371192100365162
					},
					"glue": {
						"acc,none": 0.699782634587899,
						"acc_stderr,none": 0.003343112236944854,
						"alias": "glue",
						"f1,none": 0.6924907010759175,
						"f1_stderr,none": 0.0001725049726930762,
						"mcc,none": 0.13847035035325458,
						"mcc_stderr,none": 0.0299984185715153
					},
					"kmmlu": {
						"acc,none": 0.23742419867167197,
						"acc_norm,none": 0.23742419867167197,
						"acc_norm_stderr,none": 0.024828600775311533,
						"acc_stderr,none": 0.024828600775311533,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5229116421837317,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.0004927935871743445,
						"acc_stderr,none": 0.04373292368970908,
						"alias": "kobest",
						"f1,none": 0.42457673487344283,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7039588589171356,
						"acc_stderr,none": 0.021849116254084748,
						"alias": "lambada",
						"perplexity,none": 3.9333502109461658,
						"perplexity_stderr,none": 0.32907151864962975
					},
					"lambada_cloze": {
						"acc,none": 0.025907238501843587,
						"acc_stderr,none": 0.00811590120467675,
						"alias": "lambada_cloze",
						"perplexity,none": 837.7205119402188,
						"perplexity_stderr,none": 297.2538701450135
					},
					"lambada_multilingual": {
						"acc,none": 0.5345235784979624,
						"acc_stderr,none": 0.08661226265525572,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.44996767123558,
						"perplexity_stderr,none": 8.507966961825264
					},
					"mmlu": {
						"acc,none": 0.33784361202107954,
						"acc_stderr,none": 0.06437119533451122,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31689691817215726,
						"acc_stderr,none": 0.055469582333788285,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.39233987769552625,
						"acc_stderr,none": 0.059471915432580055,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3558661033474163,
						"acc_stderr,none": 0.056548174472976435,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.29781160799238815,
						"acc_stderr,none": 0.06628074459375635,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.351596877217885,
						"acc_norm,none": 0.3117438820132781,
						"acc_norm_stderr,none": 0.00010859886158936637,
						"acc_stderr,none": 0.09800131879497548,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48292857142857143,
						"acc_stderr,none": 0.05630172097444685,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7257269633525867,
						"acc_norm,none": 0.6261037330259638,
						"acc_norm_stderr,none": 0.010042079813284158,
						"acc_stderr,none": 0.1553317186464892,
						"alias": "pythia",
						"bits_per_byte,none": 0.6350408359742838,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5529817070558825,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.29603535071706,
						"perplexity_stderr,none": 0.06411833060497432,
						"word_perplexity,none": 10.525738288859136,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.38120567375886527,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.05704819939414727,
						"acc_stderr,none": 0.04475763006031279,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6208445642407907,
						"acc_stderr,none": 0.062455828774531034,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.313007484837158,
						"acc_stderr,none": 0.0014575418233732438,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -8.36280859898459,
						"bleu_diff_stderr,none": 0.870727832787657,
						"bleu_max,none": 27.056127303161382,
						"bleu_max_stderr,none": 0.8066576080415484,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237255,
						"rouge1_diff,none": -10.693641183782578,
						"rouge1_diff_stderr,none": 0.9249413681972198,
						"rouge1_max,none": 52.269419787636295,
						"rouge1_max_stderr,none": 0.875592158608712,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.015077219200662594,
						"rouge2_diff,none": -12.81645946294325,
						"rouge2_diff_stderr,none": 1.1305558228758767,
						"rouge2_max,none": 36.11351432874707,
						"rouge2_max_stderr,none": 1.0227982672085405,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237256,
						"rougeL_diff,none": -11.0057879493879,
						"rougeL_diff_stderr,none": 0.9394494459238267,
						"rougeL_max,none": 49.41196639702691,
						"rougeL_max_stderr,none": 0.8938359500531418
					},
					"xcopa": {
						"acc,none": 0.6192727272727273,
						"acc_stderr,none": 0.06941891891331842,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.441285140562249,
						"acc_stderr,none": 0.049945032815202337,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6243306660249083,
						"acc_stderr,none": 0.060999206837165164,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.814789840413576,
						"acc_stderr,none": 0.03771108790086885,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6271138669673055,
						"acc_norm,none": 0.6220405862457723,
						"acc_norm_stderr,none": 0.08982265247507755,
						"acc_stderr,none": 0.10772226861176627,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4403125,
						"acc_stderr,none": 0.03961615124240837,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.015620595475301324,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.39166666666666666,
						"acc_stderr,none": 0.014096766997219156,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3993174061433447,
						"acc_norm,none": 0.4325938566552901,
						"acc_norm_stderr,none": 0.014478005694182533,
						"acc_stderr,none": 0.01431209455794669,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7394781144781145,
						"acc_norm,none": 0.7154882154882155,
						"acc_norm_stderr,none": 0.009258050925618818,
						"acc_stderr,none": 0.009006435890336593,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1209,
						"acc_stderr,none": 0.09780675540899796,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0535,
						"acc_stderr,none": 0.005033044880625058,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.01085628525162897,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1515,
						"acc_stderr,none": 0.008019103940840797,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.010042431240123247,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0555,
						"acc_stderr,none": 0.005120838456077828,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0865,
						"acc_stderr,none": 0.006287180554084618,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0195,
						"acc_stderr,none": 0.0030926780189124135,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1005,
						"acc_stderr,none": 0.0067247666311270465,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0245,
						"acc_stderr,none": 0.003457723662536252,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.06,
						"acc_stderr,none": 0.005311695308799976,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0039045553145336228,
						"acc_stderr,none": 0.0012992568927018666,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8122835820895521,
						"acc_stderr,none": 0.1661557766387033,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611487,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.002818500300504508,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274553,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.0156679444881735,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849879,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099813,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.00779973306183202,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406112,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033841,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785143,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244062,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727073,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732963,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574874,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.013996674851796266,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639239,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946568,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662753,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074133,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620667,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445514,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348633,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752504,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.363,
						"acc_stderr,none": 0.015213890444671283,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122581,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.513,
						"acc_stderr,none": 0.015813952101896633,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774166,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.517,
						"acc_stderr,none": 0.015810153729833427,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602497,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151112,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750636,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333371,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448843,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.0075720760915574245,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855754,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.00442940398017836,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.01455320568795042,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.434,
						"acc_stderr,none": 0.015680876566375054,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823335,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666689,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.609,
						"acc_stderr,none": 0.01543882629468178,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801098,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496496,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406097,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796394,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891024,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405754,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.306,
						"acc_stderr,none": 0.014580006055436962,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.708868501529052,
						"acc_stderr,none": 0.007945477040745953,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8214285714285714,
						"acc_stderr,none": 0.05164277182008721,
						"alias": "cb",
						"f1,none": 0.6738505747126436,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25928677563150077,
						"acc_norm,none": 0.25928677563150077,
						"acc_norm_stderr,none": 0.1155882587095293,
						"acc_stderr,none": 0.1155882587095293,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.19148936170212766,
						"acc_norm,none": 0.19148936170212766,
						"acc_norm_stderr,none": 0.05801446334976932,
						"acc_stderr,none": 0.05801446334976932,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.052486388108147784,
						"acc_stderr,none": 0.052486388108147784,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915692,
						"acc_stderr,none": 0.08742975048915692,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.28414781557589364,
						"acc_norm,none": 0.28414781557589364,
						"acc_norm_stderr,none": 0.04690889751458147,
						"acc_stderr,none": 0.04690889751458147,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.034339196275485345,
						"acc_stderr,none": 0.034339196275485345,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.037184093212853736,
						"acc_stderr,none": 0.037184093212853736,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.03223012819451555,
						"acc_stderr,none": 0.03223012819451555,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288875,
						"acc_stderr,none": 0.03976333292288875,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3065015479876161,
						"acc_norm,none": 0.3065015479876161,
						"acc_norm_stderr,none": 0.025692782965007776,
						"acc_stderr,none": 0.025692782965007776,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.03421843754304871,
						"acc_stderr,none": 0.03421843754304871,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.02765215314415927,
						"acc_stderr,none": 0.02765215314415927,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449114,
						"acc_stderr,none": 0.04648144634449114,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055045,
						"acc_stderr,none": 0.04232473532055045,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.04302548773959011,
						"acc_stderr,none": 0.04302548773959011,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.026718814072967532,
						"acc_stderr,none": 0.026718814072967532,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.03149328104507955,
						"acc_stderr,none": 0.03149328104507955,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.29931972789115646,
						"acc_norm,none": 0.29931972789115646,
						"acc_norm_stderr,none": 0.03790104530910391,
						"acc_stderr,none": 0.03790104530910391,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.037940071215336206,
						"acc_stderr,none": 0.037940071215336206,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124562,
						"acc_stderr,none": 0.03673404171124562,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3312883435582822,
						"acc_norm,none": 0.3312883435582822,
						"acc_norm_stderr,none": 0.03697983910025588,
						"acc_stderr,none": 0.03697983910025588,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761064,
						"acc_stderr,none": 0.03336605189761064,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.028514456573421432,
						"acc_stderr,none": 0.028514456573421432,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.0316314580755238,
						"acc_stderr,none": 0.0316314580755238,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.20869565217391303,
						"acc_norm,none": 0.20869565217391303,
						"acc_norm_stderr,none": 0.026854108265439682,
						"acc_stderr,none": 0.026854108265439682,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28187919463087246,
						"acc_norm,none": 0.28187919463087246,
						"acc_norm_stderr,none": 0.036982767559851006,
						"acc_stderr,none": 0.036982767559851006,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2804878048780488,
						"acc_norm,none": 0.2804878048780488,
						"acc_norm_stderr,none": 0.03518700228801578,
						"acc_stderr,none": 0.03518700228801578,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.04006168083848876,
						"acc_stderr,none": 0.04006168083848876,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.021927461972871168,
						"acc_stderr,none": 0.021927461972871168,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35046728971962615,
						"acc_norm,none": 0.35046728971962615,
						"acc_norm_stderr,none": 0.03269147055032477,
						"acc_stderr,none": 0.03269147055032477,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208567,
						"acc_stderr,none": 0.04119323030208567,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.28095238095238095,
						"acc_norm,none": 0.28095238095238095,
						"acc_norm_stderr,none": 0.031090094469344617,
						"acc_stderr,none": 0.031090094469344617,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.29444444444444445,
						"acc_norm,none": 0.29444444444444445,
						"acc_norm_stderr,none": 0.03406754001349689,
						"acc_stderr,none": 0.03406754001349689,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.033127832003565706,
						"acc_stderr,none": 0.033127832003565706,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363241,
						"acc_stderr,none": 0.041265147363241,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.03519324354579657,
						"acc_stderr,none": 0.03519324354579657,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767245,
						"acc_stderr,none": 0.030113040167767245,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.03098255553570088,
						"acc_stderr,none": 0.03098255553570088,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3045977011494253,
						"acc_norm,none": 0.3045977011494253,
						"acc_norm_stderr,none": 0.03499115838809175,
						"acc_stderr,none": 0.03499115838809175,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800254,
						"acc_stderr,none": 0.03885004245800254,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.29646017699115046,
						"acc_norm,none": 0.29646017699115046,
						"acc_norm_stderr,none": 0.030446422190794627,
						"acc_stderr,none": 0.030446422190794627,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.035886248000917095,
						"acc_stderr,none": 0.035886248000917095,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593337,
						"acc_stderr,none": 0.03253020905593337,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2919254658385093,
						"acc_norm,none": 0.2919254658385093,
						"acc_norm_stderr,none": 0.035943096367164036,
						"acc_stderr,none": 0.035943096367164036,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.13847035035325458,
						"mcc_stderr,none": 0.0299984185715153
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.032659863237109066,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.438534212880143,
						"likelihood_diff_stderr,none": 0.5186654021909625,
						"pct_stereotype,none": 0.6197078115682767,
						"pct_stereotype_stderr,none": 0.06456604537313038
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.673449612403101,
						"likelihood_diff_stderr,none": 0.08708306931731492,
						"pct_stereotype,none": 0.6392367322599881,
						"pct_stereotype_stderr,none": 0.011730189016435891
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.1401098901098905,
						"likelihood_diff_stderr,none": 0.38822249969255185,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.7272727272727275,
						"likelihood_diff_stderr,none": 1.8902274230218443,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.973076923076923,
						"likelihood_diff_stderr,none": 0.6054917710120541,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.582421875,
						"likelihood_diff_stderr,none": 0.16038125459815436,
						"pct_stereotype,none": 0.628125,
						"pct_stereotype_stderr,none": 0.02705990013900488
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5706018518518516,
						"likelihood_diff_stderr,none": 0.24247329615613597,
						"pct_stereotype,none": 0.5648148148148148,
						"pct_stereotype_stderr,none": 0.03381200005643525
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8055555555555554,
						"likelihood_diff_stderr,none": 0.3083833793218684,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.547982283464567,
						"likelihood_diff_stderr,none": 0.15262839907856485,
						"pct_stereotype,none": 0.5570866141732284,
						"pct_stereotype_stderr,none": 0.022060572810922933
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.550675675675676,
						"likelihood_diff_stderr,none": 0.337575553386818,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.042343213610845386
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.071236559139785,
						"likelihood_diff_stderr,none": 0.4325291044851275,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.166447368421053,
						"likelihood_diff_stderr,none": 0.25409151083389725,
						"pct_stereotype,none": 0.6842105263157895,
						"pct_stereotype_stderr,none": 0.03381137233892748
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.203972868217054,
						"likelihood_diff_stderr,none": 0.07399945660195557,
						"pct_stereotype,none": 0.6010733452593918,
						"pct_stereotype_stderr,none": 0.011961158770774022
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.223611111111111,
						"likelihood_diff_stderr,none": 0.31175782307473976,
						"pct_stereotype,none": 0.5777777777777777,
						"pct_stereotype_stderr,none": 0.05235473399540657
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.980769230769231,
						"likelihood_diff_stderr,none": 0.7186066124875613,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.886363636363637,
						"likelihood_diff_stderr,none": 0.43268224188334226,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.77297507788162,
						"likelihood_diff_stderr,none": 0.1342166279942476,
						"pct_stereotype,none": 0.5794392523364486,
						"pct_stereotype_stderr,none": 0.027595821988359096
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.3680830039525693,
						"likelihood_diff_stderr,none": 0.19543601224923757,
						"pct_stereotype,none": 0.45849802371541504,
						"pct_stereotype_stderr,none": 0.031388350000151964
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.576388888888889,
						"likelihood_diff_stderr,none": 0.44056019190340723,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.7456521739130433,
						"likelihood_diff_stderr,none": 0.13749140016934094,
						"pct_stereotype,none": 0.5195652173913043,
						"pct_stereotype_stderr,none": 0.023320127087608274
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4717391304347824,
						"likelihood_diff_stderr,none": 0.26407310260993566,
						"pct_stereotype,none": 0.808695652173913,
						"pct_stereotype_stderr,none": 0.03683855178508145
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.7362637362637363,
						"likelihood_diff_stderr,none": 0.3234893188315291,
						"pct_stereotype,none": 0.8241758241758241,
						"pct_stereotype_stderr,none": 0.040126194689023176
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.670918367346939,
						"likelihood_diff_stderr,none": 0.24429113561524332,
						"pct_stereotype,none": 0.7193877551020408,
						"pct_stereotype_stderr,none": 0.032174923577801474
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0625,
						"exact_match_stderr,none": 0.005371192100365162
					},
					"glue": {
						"acc,none": 0.699782634587899,
						"acc_stderr,none": 0.003343112236944854,
						"alias": "glue",
						"f1,none": 0.6924907010759175,
						"f1_stderr,none": 0.0001725049726930762,
						"mcc,none": 0.13847035035325458,
						"mcc_stderr,none": 0.0299984185715153
					},
					"hellaswag": {
						"acc,none": 0.5385381398127863,
						"acc_norm,none": 0.7248556064528978,
						"acc_norm_stderr,none": 0.00445674310817073,
						"acc_stderr,none": 0.004974937803907464,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.23742419867167197,
						"acc_norm,none": 0.23742419867167197,
						"acc_norm_stderr,none": 0.024828600775311533,
						"acc_stderr,none": 0.024828600775311533,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.013314551335935941,
						"acc_stderr,none": 0.013314551335935941,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.01327374070080448,
						"acc_stderr,none": 0.01327374070080448,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967225,
						"acc_stderr,none": 0.013473586661967225,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.01806848202433441,
						"acc_stderr,none": 0.01806848202433441,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.216,
						"acc_norm,none": 0.216,
						"acc_norm_stderr,none": 0.013019735539307789,
						"acc_stderr,none": 0.013019735539307789,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485256,
						"acc_stderr,none": 0.014174516461485256,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.217,
						"acc_norm,none": 0.217,
						"acc_norm_stderr,none": 0.01304151375727071,
						"acc_stderr,none": 0.01304151375727071,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.032166339033750324,
						"acc_stderr,none": 0.032166339033750324,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877651,
						"acc_stderr,none": 0.013663187134877651,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2076923076923077,
						"acc_norm,none": 0.2076923076923077,
						"acc_norm_stderr,none": 0.03571595663393523,
						"acc_stderr,none": 0.03571595663393523,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.01277255409611312,
						"acc_stderr,none": 0.01277255409611312,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702293,
						"acc_stderr,none": 0.013681600278702293,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.214,
						"acc_norm,none": 0.214,
						"acc_norm_stderr,none": 0.012975838021968767,
						"acc_stderr,none": 0.012975838021968767,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024377,
						"acc_stderr,none": 0.012749374359024377,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888916,
						"acc_stderr,none": 0.013718133516888916,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.01341472903024712,
						"acc_stderr,none": 0.01341472903024712,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881423,
						"acc_stderr,none": 0.013588548437881423,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.013211720158614751,
						"acc_stderr,none": 0.013211720158614751,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.213,
						"acc_norm,none": 0.213,
						"acc_norm_stderr,none": 0.012953717566737228,
						"acc_stderr,none": 0.012953717566737228,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364474,
						"acc_stderr,none": 0.013190830072364474,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096921,
						"acc_stderr,none": 0.012841374572096921,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936442,
						"acc_stderr,none": 0.013334797216936442,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.017061951343248966,
						"acc_stderr,none": 0.017061951343248966,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.013211720158614753,
						"acc_stderr,none": 0.013211720158614753,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.045126085985421276,
						"acc_stderr,none": 0.045126085985421276,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.2733333333333333,
						"acc_norm,none": 0.2733333333333333,
						"acc_norm_stderr,none": 0.025773792282785975,
						"acc_stderr,none": 0.025773792282785975,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091517,
						"acc_stderr,none": 0.014205696104091517,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938017,
						"acc_stderr,none": 0.012931481864938017,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.03027512038907304,
						"acc_stderr,none": 0.03027512038907304,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274538,
						"acc_stderr,none": 0.012886662332274538,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021354,
						"acc_stderr,none": 0.013912208651021354,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091512,
						"acc_stderr,none": 0.014205696104091512,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5229116421837317,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.0004927935871743445,
						"acc_stderr,none": 0.04373292368970908,
						"alias": "kobest",
						"f1,none": 0.42457673487344283,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.013339608823275213,
						"alias": " - kobest_boolq",
						"f1,none": 0.37432990098487745,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.01534116525402664,
						"alias": " - kobest_copa",
						"f1,none": 0.6210526315789473,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.432,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.022198954641476802,
						"acc_stderr,none": 0.022175109265613155,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42729626158618433,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5163727959697733,
						"acc_stderr,none": 0.025112470822047955,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4030075187969925,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4873015873015873,
						"acc_stderr,none": 0.014086951987375836,
						"alias": " - kobest_wic",
						"f1,none": 0.33034965034965036,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7039588589171356,
						"acc_stderr,none": 0.021849116254084748,
						"alias": "lambada",
						"perplexity,none": 3.9333502109461658,
						"perplexity_stderr,none": 0.32907151864962975
					},
					"lambada_cloze": {
						"acc,none": 0.025907238501843587,
						"acc_stderr,none": 0.00811590120467675,
						"alias": "lambada_cloze",
						"perplexity,none": 837.7205119402188,
						"perplexity_stderr,none": 297.2538701450135
					},
					"lambada_multilingual": {
						"acc,none": 0.5345235784979624,
						"acc_stderr,none": 0.08661226265525572,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.44996767123558,
						"perplexity_stderr,none": 8.507966961825264
					},
					"lambada_openai": {
						"acc,none": 0.7451969726372987,
						"acc_stderr,none": 0.006070855636710552,
						"alias": " - lambada_openai",
						"perplexity,none": 3.29603535071706,
						"perplexity_stderr,none": 0.06411833060497432
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04152920628759946,
						"acc_stderr,none": 0.0027795745725717404,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 246.0288705478176,
						"perplexity_stderr,none": 7.423972572664894
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4193673588201048,
						"acc_stderr,none": 0.006874800634833114,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.83479127070583,
						"perplexity_stderr,none": 1.9953406168687726
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7455850960605472,
						"acc_stderr,none": 0.006067809764031532,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.296529170306029,
						"perplexity_stderr,none": 0.0641589847955534
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.453522220065981,
						"acc_stderr,none": 0.0069358167754394575,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.87254975964731,
						"perplexity_stderr,none": 1.423403419776367
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5443431011061518,
						"acc_stderr,none": 0.006938529026479452,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.801472923697958,
						"perplexity_stderr,none": 0.8178081821525165
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5098001164370269,
						"acc_stderr,none": 0.006964639471873748,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.444495231820785,
						"perplexity_stderr,none": 1.1964231887659422
					},
					"lambada_standard": {
						"acc,none": 0.6621385600620997,
						"acc_stderr,none": 0.006589551054654387,
						"alias": " - lambada_standard",
						"perplexity,none": 4.570551452541598,
						"perplexity_stderr,none": 0.09710809968207976
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.010285270716087716,
						"acc_stderr,none": 0.0014056427379224299,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1429.4121533326202,
						"perplexity_stderr,none": 39.9785644103293
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.32506361323155214,
						"exact_match_stderr,get-answer": 0.01181755161409296
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.017602909186822453,
						"acc_stderr,none": 0.016555252497925898,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24427480916030533,
						"acc_norm,none": 0.2786259541984733,
						"acc_norm_stderr,none": 0.011311050680723241,
						"acc_stderr,none": 0.010840097745900449,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2629815745393635,
						"acc_norm,none": 0.2639865996649916,
						"acc_norm_stderr,none": 0.0080692726944333,
						"acc_stderr,none": 0.008059394672720419,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.40616394831603475,
						"acc_stderr,none": 0.005054462366153699,
						"alias": "mc_taco",
						"f1,none": 0.5263958104569643,
						"f1_stderr,none": 0.005582752043943218
					},
					"medmcqa": {
						"acc,none": 0.3222567535261774,
						"acc_norm,none": 0.3222567535261774,
						"acc_norm_stderr,none": 0.007226726167378287,
						"acc_stderr,none": 0.007226726167378287,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.29772191673212883,
						"acc_norm,none": 0.29772191673212883,
						"acc_norm_stderr,none": 0.01282083777732473,
						"acc_stderr,none": 0.01282083777732473,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.33784361202107954,
						"acc_stderr,none": 0.06437119533451122,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4074074074074074,
						"acc_stderr,none": 0.04244633238353228,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3223684210526316,
						"acc_stderr,none": 0.038035102483515854,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.02977308271331987,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3541666666666667,
						"acc_stderr,none": 0.039994111357535424,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768081,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3063583815028902,
						"acc_stderr,none": 0.03514942551267437,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3446808510638298,
						"acc_stderr,none": 0.03106898596312215,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.04227054451232199,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003337,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.26455026455026454,
						"acc_stderr,none": 0.02271746789770861,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4161290322580645,
						"acc_stderr,none": 0.028040981380761543,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03010833071801162,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3939393939393939,
						"acc_stderr,none": 0.038154943086889305,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.41414141414141414,
						"acc_stderr,none": 0.03509438348879629,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45077720207253885,
						"acc_stderr,none": 0.03590910952235523,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3153846153846154,
						"acc_stderr,none": 0.023559646983189936,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.02646611753895992,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.02851025151234193,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.036030385453603826,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3963302752293578,
						"acc_stderr,none": 0.02097146994790053,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19444444444444445,
						"acc_stderr,none": 0.02699145450203673,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4019607843137255,
						"acc_stderr,none": 0.034411900234824655,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.39662447257383965,
						"acc_stderr,none": 0.03184399873811225,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3991031390134529,
						"acc_stderr,none": 0.03286745312567961,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3969465648854962,
						"acc_stderr,none": 0.04291135671009225,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31689691817215726,
						"acc_stderr,none": 0.055469582333788285,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.043913262867240704,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3312883435582822,
						"acc_stderr,none": 0.03697983910025588,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.043270409325787296,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4368932038834951,
						"acc_stderr,none": 0.04911147107365777,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.03255326307272485,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4955300127713921,
						"acc_stderr,none": 0.017879248970584374,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26878612716763006,
						"acc_stderr,none": 0.02386800326250011,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2435754189944134,
						"acc_stderr,none": 0.014355911964767865,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3627450980392157,
						"acc_stderr,none": 0.027530078447110314,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.39233987769552625,
						"acc_stderr,none": 0.059471915432580055,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.43729903536977494,
						"acc_stderr,none": 0.028173917761762875,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.02682280175950788,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.28552803129074317,
						"acc_stderr,none": 0.011535751586665668,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3786764705882353,
						"acc_stderr,none": 0.02946513363977613,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3349673202614379,
						"acc_stderr,none": 0.019094228167000307,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.42727272727272725,
						"acc_stderr,none": 0.04738198703545483,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2612244897959184,
						"acc_stderr,none": 0.028123429335142797,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3558661033474163,
						"acc_stderr,none": 0.056548174472976435,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.43781094527363185,
						"acc_stderr,none": 0.0350808011219984,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.29781160799238815,
						"acc_stderr,none": 0.06628074459375635,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.03629335329947859,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.43859649122807015,
						"acc_stderr,none": 0.038057975055904594,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7496688741721854,
						"acc_stderr,none": 0.004372895904636471,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7546786004882018,
						"acc_stderr,none": 0.004339601857743967,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150203,
						"alias": "mrpc",
						"f1,none": 0.8310077519379845,
						"f1_stderr,none": 0.01595896197437224
					},
					"multimedqa": {
						"acc,none": 0.351596877217885,
						"acc_norm,none": 0.3117438820132781,
						"acc_norm_stderr,none": 0.00010859886158936637,
						"acc_stderr,none": 0.09800131879497548,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5377475247524752,
						"acc_stderr,none": 0.007161307508196869,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7032543281534724,
						"mrr_stderr,none": 0.010328932714832767,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.417607223476298,
						"r@2_stderr,none": 0.016577550348797442
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6558502650153287,
						"mrr_stderr,none": 0.010421067533055833,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750955,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.016773710557640358
					},
					"openbookqa": {
						"acc,none": 0.308,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.020667032987466104,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.011091145421162662,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3675,
						"acc_stderr,none": 0.010783321149233215,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.01105660998281833,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.011128198119942881,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01116020945760289,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884876,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48292857142857143,
						"acc_stderr,none": 0.05630172097444685,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.76550598476605,
						"acc_norm,none": 0.778563656147987,
						"acc_norm_stderr,none": 0.009687616456840265,
						"acc_stderr,none": 0.00988520314324055,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25421648163962424,
						"acc_norm,none": 0.2857600341588386,
						"acc_norm_stderr,none": 0.0033006233654095467,
						"acc_stderr,none": 0.003181129827861853,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.01996610354027947,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7257269633525867,
						"acc_norm,none": 0.6261037330259638,
						"acc_norm_stderr,none": 0.010042079813284158,
						"acc_stderr,none": 0.1553317186464892,
						"alias": "pythia",
						"bits_per_byte,none": 0.6350408359742838,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5529817070558825,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.29603535071706,
						"perplexity_stderr,none": 0.06411833060497432,
						"word_perplexity,none": 10.525738288859136,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.38120567375886527,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.05704819939414727,
						"acc_stderr,none": 0.04475763006031279,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04552192400253556,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.426056338028169,
						"acc_norm_stderr,none": 0.029395099159697805,
						"acc_stderr,none": 0.028857363751758302,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4942339373970346,
						"acc_stderr,none": 0.006764960671142521,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.698367548849864,
						"acc_stderr,none": 0.002282624936350404,
						"alias": "qqp",
						"f1,none": 0.6912736386420597,
						"f1_stderr,none": 0.002659486117296161
					},
					"race": {
						"acc,none": 0.3559808612440191,
						"acc_stderr,none": 0.01481878040053811,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2762,
						"em_stderr,none": 0.004471392997214356,
						"f1,none": 0.286251905015111,
						"f1_stderr,none": 0.004481066110597141
					},
					"rte": {
						"acc,none": 0.6462093862815884,
						"acc_stderr,none": 0.028780957835424687,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.946,
						"acc_norm,none": 0.921,
						"acc_norm_stderr,none": 0.008534156773333438,
						"acc_stderr,none": 0.007150883521295445,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6425992779783394,
						"acc_stderr,none": 0.028846510722612004,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8922018348623854,
						"acc_stderr,none": 0.010508195955513555,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5794761571528542,
						"acc_norm,none": 0.7708187543736879,
						"acc_norm_stderr,none": 0.0029716413731909297,
						"acc_stderr,none": 0.003490147810519478,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6208445642407907,
						"acc_stderr,none": 0.062455828774531034,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5410657051282052,
						"acc_stderr,none": 0.004987348592629571,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.7821019560149995,
						"acc_stderr,none": 0.0041561172996879316,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5429411764705883,
						"acc_stderr,none": 0.004932687955670626,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.313007484837158,
						"acc_stderr,none": 0.0014575418233732438,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -8.36280859898459,
						"bleu_diff_stderr,none": 0.870727832787657,
						"bleu_max,none": 27.056127303161382,
						"bleu_max_stderr,none": 0.8066576080415484,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237255,
						"rouge1_diff,none": -10.693641183782578,
						"rouge1_diff_stderr,none": 0.9249413681972198,
						"rouge1_max,none": 52.269419787636295,
						"rouge1_max_stderr,none": 0.875592158608712,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.015077219200662594,
						"rouge2_diff,none": -12.81645946294325,
						"rouge2_diff_stderr,none": 1.1305558228758767,
						"rouge2_max,none": 36.11351432874707,
						"rouge2_max_stderr,none": 1.0227982672085405,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237256,
						"rougeL_diff,none": -11.0057879493879,
						"rougeL_diff_stderr,none": 0.9394494459238267,
						"rougeL_max,none": 49.41196639702691,
						"rougeL_max_stderr,none": 0.8938359500531418
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -8.36280859898459,
						"bleu_diff_stderr,none": 0.870727832787657,
						"bleu_max,none": 27.056127303161382,
						"bleu_max_stderr,none": 0.8066576080415484,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237255,
						"rouge1_diff,none": -10.693641183782578,
						"rouge1_diff_stderr,none": 0.9249413681972198,
						"rouge1_max,none": 52.269419787636295,
						"rouge1_max_stderr,none": 0.875592158608712,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.015077219200662594,
						"rouge2_diff,none": -12.81645946294325,
						"rouge2_diff_stderr,none": 1.1305558228758767,
						"rouge2_max,none": 36.11351432874707,
						"rouge2_max_stderr,none": 1.0227982672085405,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237256,
						"rougeL_diff,none": -11.0057879493879,
						"rougeL_diff_stderr,none": 0.9394494459238267,
						"rougeL_max,none": 49.41196639702691,
						"rougeL_max_stderr,none": 0.8938359500531418
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2423500611995104,
						"acc_stderr,none": 0.015000674373570345,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38366490847480567,
						"acc_stderr,none": 0.013876409776903074,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0625,
						"exact_match_stderr,none": 0.005371192100365162
					},
					"wic": {
						"acc,none": 0.54858934169279,
						"acc_stderr,none": 0.01971695617658775,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6350587225591087,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553000961098103,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.526436146175392,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6921862667719021,
						"acc_stderr,none": 0.012972946661205027,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.3942307692307692,
						"acc_stderr,none": 0.048151547759907105,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8644688644688645,
						"acc_stderr,none": 0.02075438001546627,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6192727272727273,
						"acc_stderr,none": 0.06941891891331842,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.021966635293832915,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.020186703693570847,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.019874354831287487,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936603,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345396,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.441285140562249,
						"acc_stderr,none": 0.049945032815202337,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177142,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.010020508033762626,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39196787148594375,
						"acc_stderr,none": 0.009785342947722884,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5273092369477912,
						"acc_stderr,none": 0.01000711288973199,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5124497991967871,
						"acc_stderr,none": 0.010018965593055386,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5040160642570282,
						"acc_stderr,none": 0.010021749574555905,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42530120481927713,
						"acc_stderr,none": 0.009909597192221132,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4931726907630522,
						"acc_stderr,none": 0.010021138522919163,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39558232931726905,
						"acc_stderr,none": 0.00980109434713499,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41044176706827307,
						"acc_stderr,none": 0.0098599946725851,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45220883534136547,
						"acc_stderr,none": 0.00997618708680372,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.009881215932115996,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42449799196787147,
						"acc_stderr,none": 0.009907151253284277,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3473895582329317,
						"acc_stderr,none": 0.0095438354093349,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6243306660249083,
						"acc_stderr,none": 0.060999206837165164,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.01267071629096672,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7736598279285242,
						"acc_stderr,none": 0.010768801472359075,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499847,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5618795499669094,
						"acc_stderr,none": 0.012768206616277762,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.01262289521590771,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6598279285241562,
						"acc_stderr,none": 0.012192034998028834,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5446724023825281,
						"acc_stderr,none": 0.012815666542067292,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6783587028457975,
						"acc_stderr,none": 0.012020627225185133,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.012806088966122401,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5876902713434812,
						"acc_stderr,none": 0.012667694122397037,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6194573130377233,
						"acc_stderr,none": 0.012494500786685344,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.814789840413576,
						"acc_stderr,none": 0.03771108790086885,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8756989247311828,
						"acc_stderr,none": 0.0068437918007221024,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7539103232533889,
						"acc_stderr,none": 0.013916300191059506,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6761904761904762,
						"acc_stderr,none": 0.02640672299673,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7718253968253969,
						"acc_stderr,none": 0.018711525330668,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-PRERUN-rwkv-30_pth"
	},
	"./rwkv-x-dev/1_3-C0-PRERUN-rwkv-450_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6282412626832018,
						"acc_norm,none": 0.6257046223224352,
						"acc_norm_stderr,none": 0.08914249003419845,
						"acc_stderr,none": 0.10705248023288712,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4246875,
						"acc_stderr,none": 0.03946645633236462,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.16859999999999997,
						"acc_stderr,none": 0.15100304383911622,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8115223880597014,
						"acc_stderr,none": 0.16630817043475898,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2615156017830609,
						"acc_norm,none": 0.2615156017830609,
						"acc_norm_stderr,none": 0.12131676944458769,
						"acc_stderr,none": 0.12131676944458769,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2735278881022276,
						"acc_norm,none": 0.2735278881022276,
						"acc_norm_stderr,none": 0.043888377676807575,
						"acc_stderr,none": 0.043888377676807575,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.457737030411449,
						"likelihood_diff_stderr,none": 0.4788602648888117,
						"pct_stereotype,none": 0.6076326774001193,
						"pct_stereotype_stderr,none": 0.07055852336353173
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04822834645669291,
						"exact_match_stderr,none": 0.00475403754675294
					},
					"glue": {
						"acc,none": 0.6741156503096712,
						"acc_stderr,none": 0.0023021607926666065,
						"alias": "glue",
						"f1,none": 0.6778317719583262,
						"f1_stderr,none": 0.00017468834709311726,
						"mcc,none": 0.09536869672419312,
						"mcc_stderr,none": 0.0340185082152466
					},
					"kmmlu": {
						"acc,none": 0.2886803349696795,
						"acc_norm,none": 0.2886803349696795,
						"acc_norm_stderr,none": 0.02926974507521937,
						"acc_stderr,none": 0.02926974507521937,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5178688884016663,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.0004986853707414854,
						"acc_stderr,none": 0.04777794873787563,
						"alias": "kobest",
						"f1,none": 0.4151546842287524,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7105569571123618,
						"acc_stderr,none": 0.01761325953202111,
						"alias": "lambada",
						"perplexity,none": 3.7897795938087406,
						"perplexity_stderr,none": 0.24737225124907478
					},
					"lambada_cloze": {
						"acc,none": 0.021637880846109063,
						"acc_stderr,none": 0.00731864283212087,
						"alias": "lambada_cloze",
						"perplexity,none": 788.7463760510478,
						"perplexity_stderr,none": 278.1164648319947
					},
					"lambada_multilingual": {
						"acc,none": 0.5342518921016883,
						"acc_stderr,none": 0.08167388307393524,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.816559476450905,
						"perplexity_stderr,none": 8.46429772565319
					},
					"mmlu": {
						"acc,none": 0.3291553909699473,
						"acc_stderr,none": 0.05925599958722308,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31179596174282675,
						"acc_stderr,none": 0.047353702623963434,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.37141937560347604,
						"acc_stderr,none": 0.060670398264925963,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.34546636334091646,
						"acc_stderr,none": 0.057415569628345986,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2974944497304155,
						"acc_stderr,none": 0.06047221629841026,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3337118523775727,
						"acc_norm,none": 0.29263045073774707,
						"acc_norm_stderr,none": 0.00010988268919969268,
						"acc_stderr,none": 0.10512957609472445,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4887142857142857,
						"acc_stderr,none": 0.05361451505014049,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7237925278695598,
						"acc_norm,none": 0.6293903468366571,
						"acc_norm_stderr,none": 0.009909342146020217,
						"acc_stderr,none": 0.15519284847316417,
						"alias": "pythia",
						"bits_per_byte,none": 0.6345824922381478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5524884036477866,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.321400417003482,
						"perplexity_stderr,none": 0.06492495717505936,
						"word_perplexity,none": 10.507871469030745,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3882978723404255,
						"acc_norm,none": 0.45390070921985815,
						"acc_norm_stderr,none": 0.05286947704489444,
						"acc_stderr,none": 0.042424013480532444,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6128248643971914,
						"acc_stderr,none": 0.0592801269883518,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.30906967456484424,
						"acc_stderr,none": 0.0012800202812191187,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.391143762370323,
						"bleu_diff_stderr,none": 0.8837056284781842,
						"bleu_max,none": 27.248776418132984,
						"bleu_max_stderr,none": 0.8178494563754359,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.015572840452875823,
						"rouge1_diff,none": -10.631012681766778,
						"rouge1_diff_stderr,none": 0.944916002488598,
						"rouge1_max,none": 52.48414434315671,
						"rouge1_max_stderr,none": 0.8776883037782627,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283352,
						"rouge2_diff,none": -12.816335562870293,
						"rouge2_diff_stderr,none": 1.148673224638078,
						"rouge2_max,none": 36.39645976090811,
						"rouge2_max_stderr,none": 1.034015951509814,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.015506204722834543,
						"rougeL_diff,none": -11.093881938850641,
						"rougeL_diff_stderr,none": 0.9569023141638676,
						"rougeL_max,none": 49.6021140078436,
						"rougeL_max_stderr,none": 0.8999268428171971
					},
					"xcopa": {
						"acc,none": 0.618181818181818,
						"acc_stderr,none": 0.07286227685763978,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4372958500669344,
						"acc_stderr,none": 0.0515984106636476,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6242705011732147,
						"acc_stderr,none": 0.06068365480176174,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8156889188581704,
						"acc_stderr,none": 0.04566655631378652,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6282412626832018,
						"acc_norm,none": 0.6257046223224352,
						"acc_norm_stderr,none": 0.08914249003419845,
						"acc_stderr,none": 0.10705248023288712,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4246875,
						"acc_stderr,none": 0.03946645633236462,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302706,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.015537226438634607,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.37583333333333335,
						"acc_stderr,none": 0.0139874434005936,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40187713310580203,
						"acc_norm,none": 0.4377133105802048,
						"acc_norm_stderr,none": 0.01449757388110829,
						"acc_stderr,none": 0.01432726861457828,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.73989898989899,
						"acc_norm,none": 0.7184343434343434,
						"acc_norm_stderr,none": 0.00922893476451929,
						"acc_stderr,none": 0.009001718541079957,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.16859999999999997,
						"acc_stderr,none": 0.15100304383911622,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.076,
						"acc_stderr,none": 0.005927019890500651,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.01118277815498588,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.2145,
						"acc_stderr,none": 0.009180796180574612,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.4895,
						"acc_stderr,none": 0.011180669867648658,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.1125,
						"acc_stderr,none": 0.007067308428546994,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.145,
						"acc_stderr,none": 0.007875183125148772,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.048,
						"acc_stderr,none": 0.004781153596660238,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0545,
						"acc_stderr,none": 0.005077180702116216,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0315,
						"acc_stderr,none": 0.003906597720891822,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0105,
						"acc_stderr,none": 0.0022797968630709872,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.019522776572668113,
						"acc_stderr,none": 0.002882360578290262,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8115223880597014,
						"acc_stderr,none": 0.16630817043475898,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592085,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689092,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298545,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230184,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400236,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737237,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.555,
						"acc_stderr,none": 0.015723301886760934,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181305,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042967,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.00420638724961148,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323506,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140918,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140921,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.00977991035984717,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656796,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416054,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932575,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.687,
						"acc_stderr,none": 0.014671272822977892,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177451,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832013,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178339,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.332,
						"acc_stderr,none": 0.014899597242811483,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787738,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117712,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301318,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801105,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333445,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753655,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745904,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.684,
						"acc_stderr,none": 0.014709193056057127,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695792,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.015222868840522022,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.015594460144140603,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.523,
						"acc_stderr,none": 0.015802554246726094,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746842,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.015763390640483703,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.00953361892934098,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248116,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244078,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400252,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0109781838443578,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280307,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.00442940398017836,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296172,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.01570113134540077,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118587,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.015512467135715075,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352802,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747391,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920843,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378222,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792956,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487914,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106065,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055242,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.295,
						"acc_stderr,none": 0.014428554438445504,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7262996941896025,
						"acc_stderr,none": 0.0077980876386284275,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8035714285714286,
						"acc_stderr,none": 0.05357142857142858,
						"alias": "cb",
						"f1,none": 0.6455026455026455,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2615156017830609,
						"acc_norm,none": 0.2615156017830609,
						"acc_norm_stderr,none": 0.12131676944458769,
						"acc_stderr,none": 0.12131676944458769,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508443,
						"acc_stderr,none": 0.07335878043508443,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.06859222936927092,
						"acc_stderr,none": 0.06859222936927092,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2735278881022276,
						"acc_norm,none": 0.2735278881022276,
						"acc_norm_stderr,none": 0.043888377676807575,
						"acc_stderr,none": 0.043888377676807575,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.28708133971291866,
						"acc_norm,none": 0.28708133971291866,
						"acc_norm_stderr,none": 0.031368287214891676,
						"acc_stderr,none": 0.031368287214891676,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.039215686274509776,
						"acc_stderr,none": 0.039215686274509776,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2755417956656347,
						"acc_norm,none": 0.2755417956656347,
						"acc_norm_stderr,none": 0.02489845928700081,
						"acc_stderr,none": 0.02489845928700081,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.031822318676475544,
						"acc_stderr,none": 0.031822318676475544,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.03421843754304871,
						"acc_stderr,none": 0.03421843754304871,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.04674660221110773,
						"acc_stderr,none": 0.04674660221110773,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.0259603199968527,
						"acc_stderr,none": 0.0259603199968527,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115071,
						"acc_stderr,none": 0.03198001660115071,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.03401052620104089,
						"acc_stderr,none": 0.03401052620104089,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27044025157232704,
						"acc_norm,none": 0.27044025157232704,
						"acc_norm_stderr,none": 0.03533764101912229,
						"acc_stderr,none": 0.03533764101912229,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3067484662576687,
						"acc_norm,none": 0.3067484662576687,
						"acc_norm_stderr,none": 0.036230899157241474,
						"acc_stderr,none": 0.036230899157241474,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890495,
						"acc_stderr,none": 0.03361101403890495,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.02851445657342142,
						"acc_stderr,none": 0.02851445657342142,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365914,
						"acc_stderr,none": 0.030954055470365914,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.35714285714285715,
						"acc_norm,none": 0.35714285714285715,
						"acc_norm_stderr,none": 0.031124619309328177,
						"acc_stderr,none": 0.031124619309328177,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.027641785707241337,
						"acc_stderr,none": 0.027641785707241337,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398516,
						"acc_stderr,none": 0.037667638895398516,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335357,
						"acc_stderr,none": 0.03388193526335357,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28187919463087246,
						"acc_norm,none": 0.28187919463087246,
						"acc_norm_stderr,none": 0.036982767559851006,
						"acc_stderr,none": 0.036982767559851006,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2627118644067797,
						"acc_norm,none": 0.2627118644067797,
						"acc_norm_stderr,none": 0.04068792432070351,
						"acc_stderr,none": 0.04068792432070351,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.037950002128017815,
						"acc_stderr,none": 0.037950002128017815,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.038095238095238106,
						"acc_stderr,none": 0.038095238095238106,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.03209281645145385,
						"acc_stderr,none": 0.03209281645145385,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255952,
						"acc_stderr,none": 0.03492619473255952,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583285,
						"acc_stderr,none": 0.021605737836583285,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.32242990654205606,
						"acc_norm,none": 0.32242990654205606,
						"acc_norm_stderr,none": 0.03202616755131743,
						"acc_stderr,none": 0.03202616755131743,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208568,
						"acc_stderr,none": 0.04119323030208568,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671826,
						"acc_stderr,none": 0.041118866352671826,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2904761904761905,
						"acc_norm,none": 0.2904761904761905,
						"acc_norm_stderr,none": 0.03140260048069878,
						"acc_stderr,none": 0.03140260048069878,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.29444444444444445,
						"acc_norm,none": 0.29444444444444445,
						"acc_norm_stderr,none": 0.03406754001349688,
						"acc_stderr,none": 0.03406754001349688,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.032947543143888765,
						"acc_stderr,none": 0.032947543143888765,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.037245636197746325,
						"acc_stderr,none": 0.037245636197746325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.03382281937517294,
						"acc_stderr,none": 0.03382281937517294,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.02940699535939458,
						"acc_stderr,none": 0.02940699535939458,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.03460711084041232,
						"acc_stderr,none": 0.03460711084041232,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2743362831858407,
						"acc_norm,none": 0.2743362831858407,
						"acc_norm_stderr,none": 0.029745280627623653,
						"acc_stderr,none": 0.029745280627623653,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.034277431758165236,
						"acc_stderr,none": 0.034277431758165236,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676974,
						"acc_stderr,none": 0.03410167836676974,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0357142857142857,
						"acc_stderr,none": 0.0357142857142857,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.09536869672419312,
						"mcc_stderr,none": 0.0340185082152466
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.032659863237109066,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.457737030411449,
						"likelihood_diff_stderr,none": 0.4788602648888117,
						"pct_stereotype,none": 0.6076326774001193,
						"pct_stereotype_stderr,none": 0.07055852336353173
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6498211091234345,
						"likelihood_diff_stderr,none": 0.08697731538740652,
						"pct_stereotype,none": 0.6392367322599881,
						"pct_stereotype_stderr,none": 0.011730189016435891
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.18543956043956,
						"likelihood_diff_stderr,none": 0.39573505931295,
						"pct_stereotype,none": 0.6703296703296703,
						"pct_stereotype_stderr,none": 0.04955219508596588
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.829545454545454,
						"likelihood_diff_stderr,none": 1.8847679450297246,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.984615384615385,
						"likelihood_diff_stderr,none": 0.6166479670255034,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.59921875,
						"likelihood_diff_stderr,none": 0.1580966442690949,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.026812710310024235
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5619212962962963,
						"likelihood_diff_stderr,none": 0.239936611356075,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7465277777777777,
						"likelihood_diff_stderr,none": 0.30605613916040114,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4640748031496065,
						"likelihood_diff_stderr,none": 0.15141522319289705,
						"pct_stereotype,none": 0.5433070866141733,
						"pct_stereotype_stderr,none": 0.02212232873137453
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6114864864864864,
						"likelihood_diff_stderr,none": 0.34287885241112825,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.04188770861432398
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.98252688172043,
						"likelihood_diff_stderr,none": 0.43021845446013124,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1703947368421055,
						"likelihood_diff_stderr,none": 0.25778226158100637,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.034104864353344894
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.2666219439475253,
						"likelihood_diff_stderr,none": 0.07611046331937762,
						"pct_stereotype,none": 0.5766249254621347,
						"pct_stereotype_stderr,none": 0.012069029300507982
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3986111111111112,
						"likelihood_diff_stderr,none": 0.3238400892714442,
						"pct_stereotype,none": 0.5444444444444444,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.25,
						"likelihood_diff_stderr,none": 0.8325316656794349,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.662878787878788,
						"likelihood_diff_stderr,none": 0.4147095911543505,
						"pct_stereotype,none": 0.6818181818181818,
						"pct_stereotype_stderr,none": 0.05777171902747656
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.898753894080997,
						"likelihood_diff_stderr,none": 0.13705911160362563,
						"pct_stereotype,none": 0.5482866043613707,
						"pct_stereotype_stderr,none": 0.027820204204815794
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.481225296442688,
						"likelihood_diff_stderr,none": 0.20986654233114999,
						"pct_stereotype,none": 0.4624505928853755,
						"pct_stereotype_stderr,none": 0.031408094828172445
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.642361111111111,
						"likelihood_diff_stderr,none": 0.48315791268062463,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.05700381461700859
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8342391304347827,
						"likelihood_diff_stderr,none": 0.13893588122942996,
						"pct_stereotype,none": 0.49782608695652175,
						"pct_stereotype_stderr,none": 0.023337780813399874
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.2858695652173915,
						"likelihood_diff_stderr,none": 0.2844652755360004,
						"pct_stereotype,none": 0.7565217391304347,
						"pct_stereotype_stderr,none": 0.04019651260878071
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.868131868131868,
						"likelihood_diff_stderr,none": 0.33315793598141574,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898534
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6294642857142856,
						"likelihood_diff_stderr,none": 0.24583855782526345,
						"pct_stereotype,none": 0.6989795918367347,
						"pct_stereotype_stderr,none": 0.03284830105527339
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04822834645669291,
						"exact_match_stderr,none": 0.00475403754675294
					},
					"glue": {
						"acc,none": 0.6741156503096712,
						"acc_stderr,none": 0.0023021607926666065,
						"alias": "glue",
						"f1,none": 0.6778317719583262,
						"f1_stderr,none": 0.00017468834709311726,
						"mcc,none": 0.09536869672419312,
						"mcc_stderr,none": 0.0340185082152466
					},
					"hellaswag": {
						"acc,none": 0.5338577972515435,
						"acc_norm,none": 0.7202748456482773,
						"acc_norm_stderr,none": 0.004479467619464795,
						"acc_stderr,none": 0.004978328190775522,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2886803349696795,
						"acc_norm,none": 0.2886803349696795,
						"acc_norm_stderr,none": 0.02926974507521937,
						"acc_stderr,none": 0.02926974507521937,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.040201512610368445,
						"acc_stderr,none": 0.040201512610368445,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485246,
						"acc_stderr,none": 0.014174516461485246,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091503,
						"acc_stderr,none": 0.014205696104091503,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440472,
						"acc_stderr,none": 0.013946271849440472,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.017922109344016893,
						"acc_stderr,none": 0.017922109344016893,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377939,
						"acc_stderr,none": 0.014370995982377939,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.348,
						"acc_norm,none": 0.348,
						"acc_norm_stderr,none": 0.015070604603768408,
						"acc_stderr,none": 0.015070604603768408,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361425,
						"acc_stderr,none": 0.014498627873361425,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.03232801420614267,
						"acc_stderr,none": 0.03232801420614267,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134713,
						"acc_stderr,none": 0.014470846741134713,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2692307692307692,
						"acc_norm,none": 0.2692307692307692,
						"acc_norm_stderr,none": 0.03905328918744188,
						"acc_stderr,none": 0.03905328918744188,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722694,
						"acc_stderr,none": 0.014645596385722694,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.01485384248727033,
						"acc_stderr,none": 0.01485384248727033,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895019,
						"acc_stderr,none": 0.013825416526895019,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01420569610409151,
						"acc_stderr,none": 0.01420569610409151,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965134,
						"acc_stderr,none": 0.013895037677965134,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.0144568322948011,
						"acc_stderr,none": 0.0144568322948011,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905684,
						"acc_stderr,none": 0.014356395999905684,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934647,
						"acc_stderr,none": 0.014770821817934647,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768077,
						"acc_stderr,none": 0.04408440022768077,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.01444273494157502,
						"acc_stderr,none": 0.01444273494157502,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.327,
						"acc_norm,none": 0.327,
						"acc_norm_stderr,none": 0.014842213153411244,
						"acc_stderr,none": 0.014842213153411244,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231558,
						"acc_stderr,none": 0.014326941797231558,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920842,
						"acc_stderr,none": 0.013512312258920842,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485256,
						"acc_stderr,none": 0.014174516461485256,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.0136816002787023,
						"acc_stderr,none": 0.0136816002787023,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01834559715276358,
						"acc_stderr,none": 0.01834559715276358,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481747,
						"acc_stderr,none": 0.014251810906481747,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220475,
						"acc_stderr,none": 0.014484778521220475,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377935,
						"acc_stderr,none": 0.014370995982377935,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.0146966319607925,
						"acc_stderr,none": 0.0146966319607925,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.2833333333333333,
						"acc_norm,none": 0.2833333333333333,
						"acc_norm_stderr,none": 0.026059845940064965,
						"acc_stderr,none": 0.026059845940064965,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145148,
						"acc_stderr,none": 0.013979965645145148,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008206,
						"acc_stderr,none": 0.014414290540008206,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477338,
						"acc_stderr,none": 0.014385511563477338,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259581,
						"acc_stderr,none": 0.014111099288259581,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.02960162633044062,
						"acc_stderr,none": 0.02960162633044062,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.363,
						"acc_norm,none": 0.363,
						"acc_norm_stderr,none": 0.015213890444671285,
						"acc_stderr,none": 0.015213890444671285,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5178688884016663,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.0004986853707414854,
						"acc_stderr,none": 0.04777794873787563,
						"alias": "kobest",
						"f1,none": 0.4151546842287524,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5142450142450142,
						"acc_stderr,none": 0.013343348923385135,
						"alias": " - kobest_boolq",
						"f1,none": 0.3630747126436782,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.015349091002225349,
						"alias": " - kobest_copa",
						"f1,none": 0.6200116503024367,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.4,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.02233126442325838,
						"acc_stderr,none": 0.0219308441207285,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.395654013345738,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5138539042821159,
						"acc_stderr,none": 0.0251162986508672,
						"alias": " - kobest_sentineg",
						"f1,none": 0.38449612403100775,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7105569571123618,
						"acc_stderr,none": 0.01761325953202111,
						"alias": "lambada",
						"perplexity,none": 3.7897795938087406,
						"perplexity_stderr,none": 0.24737225124907478
					},
					"lambada_cloze": {
						"acc,none": 0.021637880846109063,
						"acc_stderr,none": 0.00731864283212087,
						"alias": "lambada_cloze",
						"perplexity,none": 788.7463760510478,
						"perplexity_stderr,none": 278.1164648319947
					},
					"lambada_multilingual": {
						"acc,none": 0.5342518921016883,
						"acc_stderr,none": 0.08167388307393524,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.816559476450905,
						"perplexity_stderr,none": 8.46429772565319
					},
					"lambada_openai": {
						"acc,none": 0.7436444789443043,
						"acc_stderr,none": 0.0060829758093544775,
						"alias": " - lambada_openai",
						"perplexity,none": 3.321400417003482,
						"perplexity_stderr,none": 0.06492495717505936
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03570735493887056,
						"acc_stderr,none": 0.002585207101453027,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 235.16671086753593,
						"perplexity_stderr,none": 6.9547892059579555
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42286046962934215,
						"acc_stderr,none": 0.0068825761876977875,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.483283823728186,
						"perplexity_stderr,none": 2.0376004562360954
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7436444789443043,
						"acc_stderr,none": 0.006082975809354469,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.320261054979718,
						"perplexity_stderr,none": 0.06501496272361941
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4525519115078595,
						"acc_stderr,none": 0.006934541419085972,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.120332769595166,
						"perplexity_stderr,none": 1.4327755725412912
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5447312245294004,
						"acc_stderr,none": 0.006938045450999905,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.177800007349973,
						"perplexity_stderr,none": 0.8397454995179787
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5074713758975354,
						"acc_stderr,none": 0.00696519991171879,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.981119726601477,
						"perplexity_stderr,none": 1.2293005278589404
					},
					"lambada_standard": {
						"acc,none": 0.6776634969920434,
						"acc_stderr,none": 0.006511392337291411,
						"alias": " - lambada_standard",
						"perplexity,none": 4.259554026807786,
						"perplexity_stderr,none": 0.08842960860745254
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.007568406753347565,
						"acc_stderr,none": 0.0012074373217599813,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1342.3260412345599,
						"perplexity_stderr,none": 37.539532539601495
					},
					"logiqa": {
						"acc,none": 0.22427035330261136,
						"acc_norm,none": 0.26881720430107525,
						"acc_norm_stderr,none": 0.01738940946371262,
						"acc_stderr,none": 0.016360043348265504,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2582697201017812,
						"acc_norm,none": 0.27162849872773537,
						"acc_norm_stderr,none": 0.011222149412328528,
						"acc_stderr,none": 0.011042608058378041,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.27403685092127306,
						"acc_norm,none": 0.27403685092127306,
						"acc_norm_stderr,none": 0.008165116067449043,
						"acc_stderr,none": 0.008165116067449045,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.49163312857445457,
						"acc_stderr,none": 0.005145175486537461,
						"alias": "mc_taco",
						"f1,none": 0.5359628770301624,
						"f1_stderr,none": 0.0059571337343539785
					},
					"medmcqa": {
						"acc,none": 0.30050203203442505,
						"acc_norm,none": 0.30050203203442505,
						"acc_norm_stderr,none": 0.007089652046133592,
						"acc_stderr,none": 0.007089652046133592,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2749410840534171,
						"acc_norm,none": 0.2749410840534171,
						"acc_norm_stderr,none": 0.012518806115871497,
						"acc_stderr,none": 0.012518806115871497,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3291553909699473,
						"acc_stderr,none": 0.05925599958722308,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.37777777777777777,
						"acc_stderr,none": 0.04188307537595853,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.0378272898086547,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39245283018867927,
						"acc_stderr,none": 0.03005258057955784,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.03456425745087,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.04488482852329017,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.03097669299853442,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.03878352372138622,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24338624338624337,
						"acc_stderr,none": 0.022101128787415426,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.04134913018303316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4129032258064516,
						"acc_stderr,none": 0.02800913812540039,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.0307127300709826,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3939393939393939,
						"acc_stderr,none": 0.03815494308688932,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.398989898989899,
						"acc_stderr,none": 0.03488901616852731,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.44559585492227977,
						"acc_stderr,none": 0.03587014986075659,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31025641025641026,
						"acc_stderr,none": 0.023454674889404288,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.22592592592592592,
						"acc_stderr,none": 0.025497532639609553,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.029079374539480007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3651376146788991,
						"acc_stderr,none": 0.02064280145438401,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.23148148148148148,
						"acc_stderr,none": 0.02876511171804697,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.39705882352941174,
						"acc_stderr,none": 0.03434131164719129,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3628691983122363,
						"acc_stderr,none": 0.03129920825530213,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.37668161434977576,
						"acc_stderr,none": 0.032521134899291884,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4198473282442748,
						"acc_stderr,none": 0.04328577215262971,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31179596174282675,
						"acc_stderr,none": 0.047353702623963434,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.371900826446281,
						"acc_stderr,none": 0.044120158066245044,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497752,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.037311335196738925,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3392857142857143,
						"acc_stderr,none": 0.04493949068613539,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.42718446601941745,
						"acc_stderr,none": 0.04897957737781168,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.41452991452991456,
						"acc_stderr,none": 0.03227396567623778,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.45338441890166026,
						"acc_stderr,none": 0.01780208713585031,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3265895953757225,
						"acc_stderr,none": 0.025248264774242826,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961466,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3758169934640523,
						"acc_stderr,none": 0.027732834353363947,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.37141937560347604,
						"acc_stderr,none": 0.060670398264925963,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4115755627009646,
						"acc_stderr,none": 0.027950481494401266,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02622964917882116,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.02601199293090201,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.27640156453715775,
						"acc_stderr,none": 0.01142215319455358,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3382352941176471,
						"acc_stderr,none": 0.028739328513983572,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3366013071895425,
						"acc_stderr,none": 0.019117213911495155,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.23673469387755103,
						"acc_stderr,none": 0.027212835884073142,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.34546636334091646,
						"acc_stderr,none": 0.057415569628345986,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4228855721393035,
						"acc_stderr,none": 0.034932317774212816,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2974944497304155,
						"acc_stderr,none": 0.06047221629841026,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.30120481927710846,
						"acc_stderr,none": 0.03571609230053481,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4093567251461988,
						"acc_stderr,none": 0.03771283107626545,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.6801833927661742,
						"acc_stderr,none": 0.00470804376276044,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.6819568755085436,
						"acc_stderr,none": 0.004697022210261883,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7426470588235294,
						"acc_stderr,none": 0.021669984270659748,
						"alias": "mrpc",
						"f1,none": 0.8154657293497364,
						"f1_stderr,none": 0.017751424175230672
					},
					"multimedqa": {
						"acc,none": 0.3337118523775727,
						"acc_norm,none": 0.29263045073774707,
						"acc_norm_stderr,none": 0.00010988268919969268,
						"acc_stderr,none": 0.10512957609472445,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5429042904290429,
						"acc_stderr,none": 0.007155314043119362,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7013732145790591,
						"mrr_stderr,none": 0.01034654269593149,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4153498871331828,
						"r@2_stderr,none": 0.01656469454977273
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.651241536636923,
						"mrr_stderr,none": 0.0104004677611456,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47742663656884876,
						"r@2_stderr,none": 0.016790178837117337
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.022017482578127683,
						"acc_stderr,none": 0.020553269174209188,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4325,
						"acc_stderr,none": 0.01108076110352171,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.010842308463902533,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.011127079848413735,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.011127079848413744,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5165,
						"acc_stderr,none": 0.011177045144808297,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4887142857142857,
						"acc_stderr,none": 0.05361451505014049,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7676822633297062,
						"acc_norm,none": 0.7709466811751904,
						"acc_norm_stderr,none": 0.009804509865175504,
						"acc_stderr,none": 0.009853201384168241,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25048035866780527,
						"acc_norm,none": 0.29819598633646455,
						"acc_norm_stderr,none": 0.003342196437272724,
						"acc_stderr,none": 0.0031655669175092403,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.0197328855859221,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7237925278695598,
						"acc_norm,none": 0.6293903468366571,
						"acc_norm_stderr,none": 0.009909342146020217,
						"acc_stderr,none": 0.15519284847316417,
						"alias": "pythia",
						"bits_per_byte,none": 0.6345824922381478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5524884036477866,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.321400417003482,
						"perplexity_stderr,none": 0.06492495717505936,
						"word_perplexity,none": 10.507871469030745,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3882978723404255,
						"acc_norm,none": 0.45390070921985815,
						"acc_norm_stderr,none": 0.05286947704489444,
						"acc_stderr,none": 0.042424013480532444,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.04552192400253556,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978165,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38380281690140844,
						"acc_norm,none": 0.4119718309859155,
						"acc_norm_stderr,none": 0.029257661342092615,
						"acc_stderr,none": 0.028908177688046176,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49679663188724144,
						"acc_stderr,none": 0.006765271702920654,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6899826861241652,
						"acc_stderr,none": 0.0023001998530882183,
						"alias": "qqp",
						"f1,none": 0.6763247598388596,
						"f1_stderr,none": 0.0027292511925375925
					},
					"race": {
						"acc,none": 0.35119617224880384,
						"acc_stderr,none": 0.014773430019036974,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2716,
						"em_stderr,none": 0.004448069528711353,
						"f1,none": 0.2813485716730356,
						"f1_stderr,none": 0.004458690917246618
					},
					"rte": {
						"acc,none": 0.7003610108303249,
						"acc_stderr,none": 0.027574370145292605,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.943,
						"acc_norm,none": 0.921,
						"acc_norm_stderr,none": 0.008534156773333442,
						"acc_stderr,none": 0.007335175853706821,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6967509025270758,
						"acc_stderr,none": 0.027668396293593706,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9002293577981652,
						"acc_stderr,none": 0.010154741963033087,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5732280315905228,
						"acc_norm,none": 0.7694691592522244,
						"acc_norm_stderr,none": 0.002977767928354148,
						"acc_stderr,none": 0.0034969737721460906,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6128248643971914,
						"acc_stderr,none": 0.0592801269883518,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5425681089743589,
						"acc_stderr,none": 0.0049860865827654295,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.7676092023918111,
						"acc_stderr,none": 0.004252154088503642,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5318627450980392,
						"acc_stderr,none": 0.004940917376708861,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.30906967456484424,
						"acc_stderr,none": 0.0012800202812191187,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.391143762370323,
						"bleu_diff_stderr,none": 0.8837056284781842,
						"bleu_max,none": 27.248776418132984,
						"bleu_max_stderr,none": 0.8178494563754359,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.015572840452875823,
						"rouge1_diff,none": -10.631012681766778,
						"rouge1_diff_stderr,none": 0.944916002488598,
						"rouge1_max,none": 52.48414434315671,
						"rouge1_max_stderr,none": 0.8776883037782627,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283352,
						"rouge2_diff,none": -12.816335562870293,
						"rouge2_diff_stderr,none": 1.148673224638078,
						"rouge2_max,none": 36.39645976090811,
						"rouge2_max_stderr,none": 1.034015951509814,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.015506204722834543,
						"rougeL_diff,none": -11.093881938850641,
						"rougeL_diff_stderr,none": 0.9569023141638676,
						"rougeL_max,none": 49.6021140078436,
						"rougeL_max_stderr,none": 0.8999268428171971
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.391143762370323,
						"bleu_diff_stderr,none": 0.8837056284781842,
						"bleu_max,none": 27.248776418132984,
						"bleu_max_stderr,none": 0.8178494563754359,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.015572840452875823,
						"rouge1_diff,none": -10.631012681766778,
						"rouge1_diff_stderr,none": 0.944916002488598,
						"rouge1_max,none": 52.48414434315671,
						"rouge1_max_stderr,none": 0.8776883037782627,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283352,
						"rouge2_diff,none": -12.816335562870293,
						"rouge2_diff_stderr,none": 1.148673224638078,
						"rouge2_max,none": 36.39645976090811,
						"rouge2_max_stderr,none": 1.034015951509814,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.015506204722834543,
						"rougeL_diff,none": -11.093881938850641,
						"rougeL_diff_stderr,none": 0.9569023141638676,
						"rougeL_max,none": 49.6021140078436,
						"rougeL_max_stderr,none": 0.8999268428171971
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24357405140758873,
						"acc_stderr,none": 0.01502635482491078,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.37456529772209973,
						"acc_stderr,none": 0.013724105460822242,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04822834645669291,
						"exact_match_stderr,none": 0.00475403754675294
					},
					"wic": {
						"acc,none": 0.5266457680250783,
						"acc_stderr,none": 0.019782570188812167,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.634564605653323,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5524691559602324,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.507174842472516,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6890292028413575,
						"acc_stderr,none": 0.013009534736286072,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.04867993747918684,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070850997,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.618181818181818,
						"acc_stderr,none": 0.07286227685763978,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.021966635293832915,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.02237429816635319,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.020186703693570847,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01987435483128748,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4372958500669344,
						"acc_stderr,none": 0.0515984106636476,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071309,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542306,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.37991967871485943,
						"acc_stderr,none": 0.009728758452987867,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5236947791164659,
						"acc_stderr,none": 0.010010812905412048,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5024096385542168,
						"acc_stderr,none": 0.010021956483068096,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5020080321285141,
						"acc_stderr,none": 0.010021992045038411,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314567,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.491566265060241,
						"acc_stderr,none": 0.01002064706811417,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.00981128402642558,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42530120481927713,
						"acc_stderr,none": 0.00990959719222113,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4461847389558233,
						"acc_stderr,none": 0.009963854274139159,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41044176706827307,
						"acc_stderr,none": 0.009859994672585129,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40200803212851405,
						"acc_stderr,none": 0.00982771587348473,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.00955664246013815,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6242705011732147,
						"acc_stderr,none": 0.06068365480176174,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5837193911317009,
						"acc_stderr,none": 0.012685473350967527,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7723362011912641,
						"acc_stderr,none": 0.010791000466746428,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7081403044341495,
						"acc_stderr,none": 0.011699256037649382,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5612177365982793,
						"acc_stderr,none": 0.012770319186938004,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.01261268831876705,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6604897418927862,
						"acc_stderr,none": 0.012186276146659451,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245277,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6763732627399074,
						"acc_stderr,none": 0.012040012546210333,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5909993381866314,
						"acc_stderr,none": 0.012652228567132374,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6227663798808736,
						"acc_stderr,none": 0.01247324065474119,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8156889188581704,
						"acc_stderr,none": 0.04566655631378652,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8735483870967742,
						"acc_stderr,none": 0.006894259207826395,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7539103232533889,
						"acc_stderr,none": 0.01391630019105949,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7718631178707225,
						"acc_stderr,none": 0.02592490955924428,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.692063492063492,
						"acc_stderr,none": 0.026051860027264458,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7857142857142857,
						"acc_stderr,none": 0.01829552775577619,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/1_3-C0-PRERUN-rwkv-60_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6265501691093573,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.1082565631089483,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4359375,
						"acc_stderr,none": 0.04153160258123746,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.12625,
						"acc_stderr,none": 0.10984383787676448,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8115671641791045,
						"acc_stderr,none": 0.1750514853970583,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25854383358098065,
						"acc_norm,none": 0.25854383358098065,
						"acc_norm_stderr,none": 0.11597590452433618,
						"acc_stderr,none": 0.11597590452433618,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2841478155758938,
						"acc_norm,none": 0.2841478155758938,
						"acc_norm_stderr,none": 0.04877155978378774,
						"acc_stderr,none": 0.04877155978378774,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4560599284436493,
						"likelihood_diff_stderr,none": 0.5131033709571705,
						"pct_stereotype,none": 0.6217948717948718,
						"pct_stereotype_stderr,none": 0.0651643139614342
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.00516890624287098
					},
					"glue": {
						"acc,none": 0.7065120295378752,
						"acc_stderr,none": 0.003436509697786032,
						"alias": "glue",
						"f1,none": 0.6947152814274697,
						"f1_stderr,none": 0.00016675963600054698,
						"mcc,none": 0.12013047900544432,
						"mcc_stderr,none": 0.03382497532337989
					},
					"kmmlu": {
						"acc,none": 0.25012994513427667,
						"acc_norm,none": 0.25012994513427667,
						"acc_norm_stderr,none": 0.02262842633634709,
						"acc_stderr,none": 0.02262842633634709,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5204998903749177,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04487013541173626,
						"alias": "kobest",
						"f1,none": 0.4159562277418555,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7061905686008151,
						"acc_stderr,none": 0.020503302074608393,
						"alias": "lambada",
						"perplexity,none": 3.896506803042753,
						"perplexity_stderr,none": 0.3119414766884127
					},
					"lambada_cloze": {
						"acc,none": 0.024451775664661363,
						"acc_stderr,none": 0.007959183066224835,
						"alias": "lambada_cloze",
						"perplexity,none": 819.623888493164,
						"perplexity_stderr,none": 288.849890266428
					},
					"lambada_multilingual": {
						"acc,none": 0.5339802057054143,
						"acc_stderr,none": 0.08674646014953317,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.41860062826282,
						"perplexity_stderr,none": 8.523281711849819
					},
					"mmlu": {
						"acc,none": 0.3364193134881071,
						"acc_stderr,none": 0.058093684881406774,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3158342189160467,
						"acc_stderr,none": 0.05090966172779643,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.38879948503379463,
						"acc_stderr,none": 0.050518910535749854,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35326616834579133,
						"acc_stderr,none": 0.05073564748965906,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2990802410402791,
						"acc_stderr,none": 0.05948001519263436,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.34790631653655074,
						"acc_norm,none": 0.30428293576946597,
						"acc_norm_stderr,none": 0.00011771135739860012,
						"acc_stderr,none": 0.10114453543139962,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4839285714285714,
						"acc_stderr,none": 0.05538516470036501,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7250397905931789,
						"acc_norm,none": 0.6277400828170845,
						"acc_norm_stderr,none": 0.01032729108793745,
						"acc_stderr,none": 0.1617880311531066,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348474372758655,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527735379681435,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2930962387368963,
						"perplexity_stderr,none": 0.06415043605354118,
						"word_perplexity,none": 10.518195660526436,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.374113475177305,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.06807781241656956,
						"acc_stderr,none": 0.04750428220910393,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6151209610329107,
						"acc_stderr,none": 0.062262283044830016,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3092432162173067,
						"acc_stderr,none": 0.0014096591931260962,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589653,
						"bleu_diff,none": -9.021925889680018,
						"bleu_diff_stderr,none": 0.8616559883588482,
						"bleu_max,none": 26.377066983232638,
						"bleu_max_stderr,none": 0.8110227326337474,
						"rouge1_acc,none": 0.25458996328029376,
						"rouge1_acc_stderr,none": 0.015250117079156494,
						"rouge1_diff,none": -11.365414337161942,
						"rouge1_diff_stderr,none": 0.8977534506495294,
						"rouge1_max,none": 51.55107668629475,
						"rouge1_max_stderr,none": 0.8780678390317714,
						"rouge2_acc,none": 0.24112607099143207,
						"rouge2_acc_stderr,none": 0.014974827279752346,
						"rouge2_diff,none": -13.868278994671925,
						"rouge2_diff_stderr,none": 1.102732294268251,
						"rouge2_max,none": 35.2308501114606,
						"rouge2_max_stderr,none": 1.027205391205865,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.836143289677882,
						"rougeL_diff_stderr,none": 0.9128014202052973,
						"rougeL_max,none": 48.640008568398954,
						"rougeL_max_stderr,none": 0.8967200412459587
					},
					"xcopa": {
						"acc,none": 0.616909090909091,
						"acc_stderr,none": 0.07011057546720761,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44147255689424364,
						"acc_stderr,none": 0.05140894932634247,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6246314902833765,
						"acc_stderr,none": 0.05975167650980203,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.03793769487537483,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6265501691093573,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.1082565631089483,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4359375,
						"acc_stderr,none": 0.04153160258123746,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.01581119837311488,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.426,
						"acc_stderr,none": 0.015645087688113814,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.013999694682718618,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39761092150170646,
						"acc_norm,none": 0.4308873720136519,
						"acc_norm_stderr,none": 0.01447113339264248,
						"acc_stderr,none": 0.014301752223279528,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7394781144781145,
						"acc_norm,none": 0.7188552188552189,
						"acc_norm_stderr,none": 0.009224735470287005,
						"acc_stderr,none": 0.009006435890336595,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.12625,
						"acc_stderr,none": 0.10984383787676448,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.058,
						"acc_stderr,none": 0.00522796957077193,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.4105,
						"acc_stderr,none": 0.011002518016406629,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.149,
						"acc_stderr,none": 0.00796437163923195,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.3175,
						"acc_stderr,none": 0.010411583719001251,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0715,
						"acc_stderr,none": 0.005762853480708959,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.091,
						"acc_stderr,none": 0.006432743590028105,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.024,
						"acc_stderr,none": 0.0034231358327511726,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0745,
						"acc_stderr,none": 0.005872999324070262,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.027,
						"acc_stderr,none": 0.0036251994476880355,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.039,
						"acc_stderr,none": 0.004329997048176558,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006073752711496746,
						"acc_stderr,none": 0.0016186926522842547,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8115671641791045,
						"acc_stderr,none": 0.1750514853970583,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592076,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689087,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437603,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.01141991306509871,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139995,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786534,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.015658997547870243,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767784,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427417,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397214,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240663,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318237,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408948,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574885,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.735,
						"acc_stderr,none": 0.013963164754809949,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929515,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727058,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.01493311749093257,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248128,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938598,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405186,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220473,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592069,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333356,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074801,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.014746404865473494,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248095,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577797,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.501,
						"acc_stderr,none": 0.01581926829057683,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653874,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787735,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702308,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524407,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578054,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651532,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.01575792855397917,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280313,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397339,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852623,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406126,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771293,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.014174516461485254,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632163,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524407,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030036,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.015275252316519362,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.307,
						"acc_stderr,none": 0.014593284892852627,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7192660550458716,
						"acc_stderr,none": 0.007859316642849508,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8035714285714286,
						"acc_stderr,none": 0.05357142857142858,
						"alias": "cb",
						"f1,none": 0.6494078188869091,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25854383358098065,
						"acc_norm,none": 0.25854383358098065,
						"acc_norm_stderr,none": 0.11597590452433618,
						"acc_stderr,none": 0.11597590452433618,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3939393939393939,
						"acc_norm,none": 0.3939393939393939,
						"acc_norm_stderr,none": 0.08637692614387409,
						"acc_stderr,none": 0.08637692614387409,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.19148936170212766,
						"acc_norm,none": 0.19148936170212766,
						"acc_norm_stderr,none": 0.05801446334976932,
						"acc_stderr,none": 0.05801446334976932,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.052486388108147784,
						"acc_stderr,none": 0.052486388108147784,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129649,
						"acc_stderr,none": 0.06861056852129649,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2841478155758938,
						"acc_norm,none": 0.2841478155758938,
						"acc_norm_stderr,none": 0.04877155978378774,
						"acc_stderr,none": 0.04877155978378774,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816525,
						"acc_stderr,none": 0.03427743175816525,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.31100478468899523,
						"acc_norm,none": 0.31100478468899523,
						"acc_norm_stderr,none": 0.03209666953348979,
						"acc_stderr,none": 0.03209666953348979,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.039153454088478354,
						"acc_stderr,none": 0.039153454088478354,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.04026377210787311,
						"acc_stderr,none": 0.04026377210787311,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.025392108664806786,
						"acc_stderr,none": 0.025392108664806786,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.03421843754304871,
						"acc_stderr,none": 0.03421843754304871,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.043842955869188835,
						"acc_stderr,none": 0.043842955869188835,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.026718814072967532,
						"acc_stderr,none": 0.026718814072967532,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399812,
						"acc_stderr,none": 0.03166009679399812,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.29931972789115646,
						"acc_norm,none": 0.29931972789115646,
						"acc_norm_stderr,none": 0.03790104530910391,
						"acc_stderr,none": 0.03790104530910391,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089089,
						"acc_stderr,none": 0.03762240935089089,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.03652215878407506,
						"acc_stderr,none": 0.03652215878407506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890495,
						"acc_stderr,none": 0.03361101403890495,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365907,
						"acc_stderr,none": 0.030954055470365907,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.0316314580755238,
						"acc_stderr,none": 0.0316314580755238,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.20869565217391303,
						"acc_norm,none": 0.20869565217391303,
						"acc_norm_stderr,none": 0.02685410826543965,
						"acc_stderr,none": 0.02685410826543965,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698768,
						"acc_stderr,none": 0.03724517629698768,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.035637888362588285,
						"acc_stderr,none": 0.035637888362588285,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.03766763889539852,
						"acc_stderr,none": 0.03766763889539852,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235173,
						"acc_stderr,none": 0.03970158273235173,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593335,
						"acc_stderr,none": 0.03253020905593335,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.034516288762506196,
						"acc_stderr,none": 0.034516288762506196,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745193,
						"acc_stderr,none": 0.021801329069745193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35046728971962615,
						"acc_norm,none": 0.35046728971962615,
						"acc_norm_stderr,none": 0.03269147055032477,
						"acc_stderr,none": 0.03269147055032477,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04267895997763195,
						"acc_stderr,none": 0.04267895997763195,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.041788598786318756,
						"acc_stderr,none": 0.041788598786318756,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.031248474232843364,
						"acc_stderr,none": 0.031248474232843364,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.29444444444444445,
						"acc_norm,none": 0.29444444444444445,
						"acc_norm_stderr,none": 0.034067540013496884,
						"acc_stderr,none": 0.034067540013496884,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.033127832003565685,
						"acc_stderr,none": 0.033127832003565685,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843724,
						"acc_stderr,none": 0.034645078898843724,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.022439412582786405,
						"acc_stderr,none": 0.022439412582786405,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3232758620689655,
						"acc_norm,none": 0.3232758620689655,
						"acc_norm_stderr,none": 0.030774179531794444,
						"acc_stderr,none": 0.030774179531794444,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03584022203803135,
						"acc_stderr,none": 0.03584022203803135,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2920353982300885,
						"acc_norm,none": 0.2920353982300885,
						"acc_norm_stderr,none": 0.030313233223988514,
						"acc_stderr,none": 0.030313233223988514,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.12013047900544432,
						"mcc_stderr,none": 0.03382497532337989
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4560599284436493,
						"likelihood_diff_stderr,none": 0.5131033709571705,
						"pct_stereotype,none": 0.6217948717948718,
						"pct_stereotype_stderr,none": 0.0651643139614342
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.681052474657126,
						"likelihood_diff_stderr,none": 0.08705570170078819,
						"pct_stereotype,none": 0.6422182468694096,
						"pct_stereotype_stderr,none": 0.011708827480368503
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.196428571428571,
						"likelihood_diff_stderr,none": 0.3932142621481852,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.6477272727272725,
						"likelihood_diff_stderr,none": 1.8768035403114574,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.996153846153846,
						"likelihood_diff_stderr,none": 0.6173204228258369,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.613671875,
						"likelihood_diff_stderr,none": 0.1581072863418556,
						"pct_stereotype,none": 0.640625,
						"pct_stereotype_stderr,none": 0.026864609422436472
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.578125,
						"likelihood_diff_stderr,none": 0.24041195111005145,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252335
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8125,
						"likelihood_diff_stderr,none": 0.30918626476992594,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5531496062992125,
						"likelihood_diff_stderr,none": 0.15285876437902213,
						"pct_stereotype,none": 0.5531496062992126,
						"pct_stereotype_stderr,none": 0.022079965811503375
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5281531531531534,
						"likelihood_diff_stderr,none": 0.33825080547350106,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.041887708614323976
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.060483870967742,
						"likelihood_diff_stderr,none": 0.4291730476373726,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1723684210526315,
						"likelihood_diff_stderr,none": 0.25647823070198206,
						"pct_stereotype,none": 0.6631578947368421,
						"pct_stereotype_stderr,none": 0.03437880340748324
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.232334525939177,
						"likelihood_diff_stderr,none": 0.07473799220359553,
						"pct_stereotype,none": 0.6016696481812761,
						"pct_stereotype_stderr,none": 0.011958143055896412
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.245833333333333,
						"likelihood_diff_stderr,none": 0.3169575960052357,
						"pct_stereotype,none": 0.5888888888888889,
						"pct_stereotype_stderr,none": 0.05215564061107554
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.076923076923077,
						"likelihood_diff_stderr,none": 0.7263275949238945,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.890151515151516,
						"likelihood_diff_stderr,none": 0.4368357023896387,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.789719626168224,
						"likelihood_diff_stderr,none": 0.13470389004858302,
						"pct_stereotype,none": 0.573208722741433,
						"pct_stereotype_stderr,none": 0.02764962041526109
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.3567193675889326,
						"likelihood_diff_stderr,none": 0.19998053331900753,
						"pct_stereotype,none": 0.466403162055336,
						"pct_stereotype_stderr,none": 0.031425854566559795
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5694444444444446,
						"likelihood_diff_stderr,none": 0.4395206027994227,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.797282608695652,
						"likelihood_diff_stderr,none": 0.1377416517884154,
						"pct_stereotype,none": 0.5304347826086957,
						"pct_stereotype_stderr,none": 0.023294726417873602
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4684782608695652,
						"likelihood_diff_stderr,none": 0.27238127788311683,
						"pct_stereotype,none": 0.7913043478260869,
						"pct_stereotype_stderr,none": 0.03806063686277675
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.868131868131868,
						"likelihood_diff_stderr,none": 0.3201929329899415,
						"pct_stereotype,none": 0.8461538461538461,
						"pct_stereotype_stderr,none": 0.03803178711331106
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6619897959183674,
						"likelihood_diff_stderr,none": 0.24617929867319763,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.03235077240413133
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.00516890624287098
					},
					"glue": {
						"acc,none": 0.7065120295378752,
						"acc_stderr,none": 0.003436509697786032,
						"alias": "glue",
						"f1,none": 0.6947152814274697,
						"f1_stderr,none": 0.00016675963600054698,
						"mcc,none": 0.12013047900544432,
						"mcc_stderr,none": 0.03382497532337989
					},
					"hellaswag": {
						"acc,none": 0.5381398127862975,
						"acc_norm,none": 0.724457279426409,
						"acc_norm_stderr,none": 0.004458742356237894,
						"acc_stderr,none": 0.004975243508752004,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.25012994513427667,
						"acc_norm,none": 0.25012994513427667,
						"acc_norm_stderr,none": 0.02262842633634709,
						"acc_stderr,none": 0.02262842633634709,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967225,
						"acc_stderr,none": 0.013473586661967225,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.01364467578131413,
						"acc_stderr,none": 0.01364467578131413,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.013754278613587079,
						"acc_stderr,none": 0.013754278613587079,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021349,
						"acc_stderr,none": 0.013912208651021349,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.01813969167387841,
						"acc_stderr,none": 0.01813969167387841,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.01343445140243869,
						"acc_stderr,none": 0.01343445140243869,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231563,
						"acc_stderr,none": 0.014326941797231563,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177443,
						"acc_stderr,none": 0.013569640199177443,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.032785276754349606,
						"acc_stderr,none": 0.032785276754349606,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2076923076923077,
						"acc_norm,none": 0.2076923076923077,
						"acc_norm_stderr,none": 0.03571595663393523,
						"acc_stderr,none": 0.03571595663393523,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847415,
						"acc_stderr,none": 0.044619604333847415,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763909,
						"acc_stderr,none": 0.013253174964763909,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145151,
						"acc_stderr,none": 0.013979965645145151,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937591,
						"acc_stderr,none": 0.013493000446937591,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763918,
						"acc_stderr,none": 0.013253174964763918,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877654,
						"acc_stderr,none": 0.013663187134877654,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895042,
						"acc_stderr,none": 0.013825416526895042,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.04560480215720684,
						"acc_stderr,none": 0.04560480215720684,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314126,
						"acc_stderr,none": 0.013644675781314126,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965127,
						"acc_stderr,none": 0.013895037677965127,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.013232501619085334,
						"acc_stderr,none": 0.013232501619085334,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220077,
						"acc_stderr,none": 0.013374972519220077,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.013232501619085334,
						"acc_stderr,none": 0.013232501619085334,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.22833333333333333,
						"acc_norm,none": 0.22833333333333333,
						"acc_norm_stderr,none": 0.017150868516058564,
						"acc_stderr,none": 0.017150868516058564,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440472,
						"acc_stderr,none": 0.013946271849440472,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872835,
						"acc_stderr,none": 0.013790038620872835,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145163,
						"acc_stderr,none": 0.013979965645145163,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.2633333333333333,
						"acc_norm,none": 0.2633333333333333,
						"acc_norm_stderr,none": 0.025471401031969213,
						"acc_stderr,none": 0.025471401031969213,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490528,
						"acc_stderr,none": 0.014127086556490528,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177434,
						"acc_stderr,none": 0.013569640199177434,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145144,
						"acc_stderr,none": 0.013979965645145144,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.013084731950262008,
						"acc_stderr,none": 0.013084731950262008,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.01373625439065114,
						"acc_stderr,none": 0.01373625439065114,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950436,
						"acc_stderr,none": 0.014553205687950436,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5204998903749177,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04487013541173626,
						"alias": "kobest",
						"f1,none": 0.4159562277418555,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.50997150997151,
						"acc_stderr,none": 0.013346112671554732,
						"alias": " - kobest_boolq",
						"f1,none": 0.3527317951796895,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.623,
						"acc_stderr,none": 0.015333170125779848,
						"alias": " - kobest_copa",
						"f1,none": 0.6223016357244545,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.426,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.02216263442665284,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4212469167996778,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.025102898696363056,
						"alias": " - kobest_sentineg",
						"f1,none": 0.39228166290686434,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7061905686008151,
						"acc_stderr,none": 0.020503302074608393,
						"alias": "lambada",
						"perplexity,none": 3.896506803042753,
						"perplexity_stderr,none": 0.3119414766884127
					},
					"lambada_cloze": {
						"acc,none": 0.024451775664661363,
						"acc_stderr,none": 0.007959183066224835,
						"alias": "lambada_cloze",
						"perplexity,none": 819.623888493164,
						"perplexity_stderr,none": 288.849890266428
					},
					"lambada_multilingual": {
						"acc,none": 0.5339802057054143,
						"acc_stderr,none": 0.08674646014953317,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.41860062826282,
						"perplexity_stderr,none": 8.523281711849819
					},
					"lambada_openai": {
						"acc,none": 0.7459732194837958,
						"acc_stderr,none": 0.006064757540495046,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2930962387368963,
						"perplexity_stderr,none": 0.06415043605354118
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03978265088298079,
						"acc_stderr,none": 0.002722975328086062,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 244.63833164409323,
						"perplexity_stderr,none": 7.311225473352508
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4187851736852319,
						"acc_stderr,none": 0.006873470354770233,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.89465170757694,
						"perplexity_stderr,none": 2.000877243063329
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.745391034348923,
						"acc_stderr,none": 0.00606933349363062,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2936761971817705,
						"perplexity_stderr,none": 0.06411330370413029
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4525519115078595,
						"acc_stderr,none": 0.006934541419085972,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.85556237204309,
						"perplexity_stderr,none": 1.4220942757187651
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5435668542596546,
						"acc_stderr,none": 0.006939483436039623,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.747943363534777,
						"perplexity_stderr,none": 0.813997411811357
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5096060547254027,
						"acc_stderr,none": 0.0069646919494281865,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.301169500977508,
						"perplexity_stderr,none": 1.1886964064175747
					},
					"lambada_standard": {
						"acc,none": 0.6671841645643315,
						"acc_stderr,none": 0.0065650332306597305,
						"alias": " - lambada_standard",
						"perplexity,none": 4.498857679542301,
						"perplexity_stderr,none": 0.09524696169255288
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.009120900446341937,
						"acc_stderr,none": 0.0013244673737120068,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1394.609445342235,
						"perplexity_stderr,none": 38.66494535948621
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825177,
						"acc_stderr,none": 0.0165552524979259,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25127226463104324,
						"acc_norm,none": 0.2786259541984733,
						"acc_norm_stderr,none": 0.011311050680723241,
						"acc_stderr,none": 0.01094324556925147,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.27001675041876044,
						"acc_norm,none": 0.2706867671691792,
						"acc_norm_stderr,none": 0.008133756186080306,
						"acc_stderr,none": 0.008127414157987851,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.42692226223257784,
						"acc_stderr,none": 0.005090637485187123,
						"alias": "mc_taco",
						"f1,none": 0.5309872583860622,
						"f1_stderr,none": 0.0056481898831093535
					},
					"medmcqa": {
						"acc,none": 0.3122161128376763,
						"acc_norm,none": 0.3122161128376763,
						"acc_norm_stderr,none": 0.0071657498060347154,
						"acc_stderr,none": 0.0071657498060347154,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.28436763550667715,
						"acc_norm,none": 0.28436763550667715,
						"acc_norm_stderr,none": 0.012648572500776634,
						"acc_stderr,none": 0.012648572500776634,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3364193134881071,
						"acc_stderr,none": 0.058093684881406774,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.29605263157894735,
						"acc_stderr,none": 0.037150621549989056,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.02989060968628664,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3402777777777778,
						"acc_stderr,none": 0.03962135573486219,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542126,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3352601156069364,
						"acc_stderr,none": 0.03599586301247078,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.043364327079931785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.041424397194893596,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003337,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.26455026455026454,
						"acc_stderr,none": 0.0227174678977086,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.04240799327574925,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.027869320571664632,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.030108330718011625,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.40606060606060607,
						"acc_stderr,none": 0.03834816355401181,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.0347327959083696,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.44041450777202074,
						"acc_stderr,none": 0.03582724530036093,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.32051282051282054,
						"acc_stderr,none": 0.023661296393964273,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.027195934804085626,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.28991596638655465,
						"acc_stderr,none": 0.029472485833136088,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3981651376146789,
						"acc_stderr,none": 0.02098798942265427,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093936,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.36764705882352944,
						"acc_stderr,none": 0.03384132045674119,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.37130801687763715,
						"acc_stderr,none": 0.031450686007448596,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3901345291479821,
						"acc_stderr,none": 0.03273766725459157,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3969465648854962,
						"acc_stderr,none": 0.04291135671009225,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3158342189160467,
						"acc_stderr,none": 0.05090966172779643,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.35537190082644626,
						"acc_stderr,none": 0.04369236326573981,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374984,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3496932515337423,
						"acc_stderr,none": 0.03746668325470021,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4174757281553398,
						"acc_stderr,none": 0.048828405482122375,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4188034188034188,
						"acc_stderr,none": 0.03232128912157792,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4661558109833972,
						"acc_stderr,none": 0.017838956009136802,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.02440517393578323,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24804469273743016,
						"acc_stderr,none": 0.014444157808261452,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3758169934640523,
						"acc_stderr,none": 0.027732834353363944,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.38879948503379463,
						"acc_stderr,none": 0.050518910535749854,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.43729903536977494,
						"acc_stderr,none": 0.028173917761762878,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3395061728395062,
						"acc_stderr,none": 0.026348564412011624,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.28748370273794005,
						"acc_stderr,none": 0.011559337355708507,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.40441176470588236,
						"acc_stderr,none": 0.02981263070156974,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.32189542483660133,
						"acc_stderr,none": 0.018901015322093092,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.028920583220675585,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35326616834579133,
						"acc_stderr,none": 0.05073564748965906,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4129353233830846,
						"acc_stderr,none": 0.03481520803367348,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2990802410402791,
						"acc_stderr,none": 0.05948001519263436,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.03696584317010601,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4152046783625731,
						"acc_stderr,none": 0.03779275945503201,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7525216505348956,
						"acc_stderr,none": 0.0043561725928579105,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7558991049633849,
						"acc_stderr,none": 0.004332292325983107,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7401960784313726,
						"acc_stderr,none": 0.021736971522579637,
						"alias": "mrpc",
						"f1,none": 0.8306709265175719,
						"f1_stderr,none": 0.016237713162222334
					},
					"multimedqa": {
						"acc,none": 0.34790631653655074,
						"acc_norm,none": 0.30428293576946597,
						"acc_norm_stderr,none": 0.00011771135739860012,
						"acc_stderr,none": 0.10114453543139962,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5402227722772277,
						"acc_stderr,none": 0.007158526919152116,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6999623793646121,
						"mrr_stderr,none": 0.010327368197832405,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.42099322799097066,
						"r@2_stderr,none": 0.01659616489551804
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6549097081608481,
						"mrr_stderr,none": 0.010407159272368776,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4729119638826185,
						"r@2_stderr,none": 0.01678263288163964
					},
					"openbookqa": {
						"acc,none": 0.31,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982786,
						"acc_stderr,none": 0.020704041021724788,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4315,
						"acc_stderr,none": 0.01107769076190085,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.373,
						"acc_stderr,none": 0.01081637633399009,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.011072998945761353,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886322,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456286,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5345,
						"acc_stderr,none": 0.011156482803925168,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5175,
						"acc_stderr,none": 0.01117628425125418,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4839285714285714,
						"acc_stderr,none": 0.05538516470036501,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7671381936887922,
						"acc_norm,none": 0.780739934711643,
						"acc_norm_stderr,none": 0.009653357463605306,
						"acc_stderr,none": 0.00986123607108075,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25309564474807855,
						"acc_norm,none": 0.2860269000853971,
						"acc_norm_stderr,none": 0.0033015472391030075,
						"acc_stderr,none": 0.0031764935891050814,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587568,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7250397905931789,
						"acc_norm,none": 0.6277400828170845,
						"acc_norm_stderr,none": 0.01032729108793745,
						"acc_stderr,none": 0.1617880311531066,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348474372758655,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527735379681435,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2930962387368963,
						"perplexity_stderr,none": 0.06415043605354118,
						"word_perplexity,none": 10.518195660526436,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.374113475177305,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.06807781241656956,
						"acc_stderr,none": 0.04750428220910393,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.6,
						"acc_norm_stderr,none": 0.04490887131390718,
						"acc_stderr,none": 0.04560517440787951,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.325,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.037144541740773654,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.36971830985915494,
						"acc_norm,none": 0.4119718309859155,
						"acc_norm_stderr,none": 0.0292576613420926,
						"acc_stderr,none": 0.028695223203150082,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7086816720257235,
						"acc_stderr,none": 0.0022597635814995323,
						"alias": "qqp",
						"f1,none": 0.6936641697877652,
						"f1_stderr,none": 0.0026883507854682668
					},
					"race": {
						"acc,none": 0.3550239234449761,
						"acc_stderr,none": 0.014809839887617084,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.275,
						"em_stderr,none": 0.004465366048757013,
						"f1,none": 0.28499857167899606,
						"f1_stderr,none": 0.0044753833618091455
					},
					"rte": {
						"acc,none": 0.6462093862815884,
						"acc_stderr,none": 0.028780957835424687,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.945,
						"acc_norm,none": 0.92,
						"acc_norm_stderr,none": 0.008583336977753651,
						"acc_stderr,none": 0.0072129762946392395,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6462093862815884,
						"acc_stderr,none": 0.028780957835424687,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9002293577981652,
						"acc_stderr,none": 0.010154741963033079,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5782765170448866,
						"acc_norm,none": 0.7721683494951515,
						"acc_norm_stderr,none": 0.002965471458539544,
						"acc_stderr,none": 0.0034915027847678515,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6151209610329107,
						"acc_stderr,none": 0.062262283044830016,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5392628205128205,
						"acc_stderr,none": 0.0049888027574318365,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.7771359075706902,
						"acc_stderr,none": 0.004189845252138997,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5326470588235294,
						"acc_stderr,none": 0.004940415331967163,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3092432162173067,
						"acc_stderr,none": 0.0014096591931260962,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589653,
						"bleu_diff,none": -9.021925889680018,
						"bleu_diff_stderr,none": 0.8616559883588482,
						"bleu_max,none": 26.377066983232638,
						"bleu_max_stderr,none": 0.8110227326337474,
						"rouge1_acc,none": 0.25458996328029376,
						"rouge1_acc_stderr,none": 0.015250117079156494,
						"rouge1_diff,none": -11.365414337161942,
						"rouge1_diff_stderr,none": 0.8977534506495294,
						"rouge1_max,none": 51.55107668629475,
						"rouge1_max_stderr,none": 0.8780678390317714,
						"rouge2_acc,none": 0.24112607099143207,
						"rouge2_acc_stderr,none": 0.014974827279752346,
						"rouge2_diff,none": -13.868278994671925,
						"rouge2_diff_stderr,none": 1.102732294268251,
						"rouge2_max,none": 35.2308501114606,
						"rouge2_max_stderr,none": 1.027205391205865,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.836143289677882,
						"rougeL_diff_stderr,none": 0.9128014202052973,
						"rougeL_max,none": 48.640008568398954,
						"rougeL_max_stderr,none": 0.8967200412459587
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589653,
						"bleu_diff,none": -9.021925889680018,
						"bleu_diff_stderr,none": 0.8616559883588482,
						"bleu_max,none": 26.377066983232638,
						"bleu_max_stderr,none": 0.8110227326337474,
						"rouge1_acc,none": 0.25458996328029376,
						"rouge1_acc_stderr,none": 0.015250117079156494,
						"rouge1_diff,none": -11.365414337161942,
						"rouge1_diff_stderr,none": 0.8977534506495294,
						"rouge1_max,none": 51.55107668629475,
						"rouge1_max_stderr,none": 0.8780678390317714,
						"rouge2_acc,none": 0.24112607099143207,
						"rouge2_acc_stderr,none": 0.014974827279752346,
						"rouge2_diff,none": -13.868278994671925,
						"rouge2_diff_stderr,none": 1.102732294268251,
						"rouge2_max,none": 35.2308501114606,
						"rouge2_max_stderr,none": 1.027205391205865,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.836143289677882,
						"rougeL_diff_stderr,none": 0.9128014202052973,
						"rougeL_max,none": 48.640008568398954,
						"rougeL_max_stderr,none": 0.8967200412459587
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23990208078335373,
						"acc_stderr,none": 0.01494881267906214,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3785843516512597,
						"acc_stderr,none": 0.013803484375136657,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.005168906242870982
					},
					"wic": {
						"acc,none": 0.5626959247648903,
						"acc_stderr,none": 0.019654361107553548,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6348474372758655,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527735379681435,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.518195660526436,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6906077348066298,
						"acc_stderr,none": 0.012991329330822999,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.38461538461538464,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8644688644688645,
						"acc_stderr,none": 0.020754380015466267,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.616909090909091,
						"acc_stderr,none": 0.07011057546720761,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.021966635293832915,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503233,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587574,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962132,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628043,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.020776701920308997,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44147255689424364,
						"acc_stderr,none": 0.05140894932634247,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4955823293172691,
						"acc_stderr,none": 0.01002168168176935,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010001,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.52570281124498,
						"acc_stderr,none": 0.010008822253312082,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5036144578313253,
						"acc_stderr,none": 0.010021811000966342,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.01002134544404757,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810771,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4947791164658635,
						"acc_stderr,none": 0.010021526496530347,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40200803212851405,
						"acc_stderr,none": 0.009827715873484733,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583401,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45140562248995986,
						"acc_stderr,none": 0.009974628047721973,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41405622489959837,
						"acc_stderr,none": 0.009872910116421198,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.00990591824499448,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495745,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6246314902833765,
						"acc_stderr,none": 0.05975167650980203,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5909993381866314,
						"acc_stderr,none": 0.012652228567132372,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7703507610853739,
						"acc_stderr,none": 0.010824012610568654,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7068166776968895,
						"acc_stderr,none": 0.01171479117762577,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5665122435473197,
						"acc_stderr,none": 0.012752771973917618,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.012612688318767051,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6611515552614163,
						"acc_stderr,none": 0.012180490758739025,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.542686962276638,
						"acc_stderr,none": 0.012820147204256234,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6770350761085374,
						"acc_stderr,none": 0.012033578346967676,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5876902713434812,
						"acc_stderr,none": 0.01266769412239704,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6207809397749835,
						"acc_stderr,none": 0.012486070771171334,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.03793769487537483,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8791397849462366,
						"acc_stderr,none": 0.00676164843935546,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7528675703858185,
						"acc_stderr,none": 0.013936105008393996,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607696,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7718253968253969,
						"acc_stderr,none": 0.018711525330668003,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-PRERUN-rwkv-60_pth"
	},
	"./rwkv-x-dev/1_3-C0-rwkv-140_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6397970687711386,
						"acc_norm,none": 0.6426155580608793,
						"acc_norm_stderr,none": 0.08945925665657509,
						"acc_stderr,none": 0.10965744641532278,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.486875,
						"acc_stderr,none": 0.050166759138332295,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0594,
						"acc_stderr,none": 0.03316450083585226,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8135671641791045,
						"acc_stderr,none": 0.16051597921288527,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29197622585438343,
						"acc_norm,none": 0.29197622585438343,
						"acc_norm_stderr,none": 0.12644343846028328,
						"acc_stderr,none": 0.12644343846028328,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3179070972198239,
						"acc_norm,none": 0.3179070972198239,
						"acc_norm_stderr,none": 0.06129153851333873,
						"acc_stderr,none": 0.06129153851333873,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.465908243887895,
						"likelihood_diff_stderr,none": 0.518455982928055,
						"pct_stereotype,none": 0.6131484794275492,
						"pct_stereotype_stderr,none": 0.0712074877798488
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16338582677165353,
						"exact_match_stderr,none": 0.008203795712801942
					},
					"glue": {
						"acc,none": 0.7359010242972844,
						"acc_stderr,none": 0.004289797828827916,
						"alias": "glue",
						"f1,none": 0.7139513735431744,
						"f1_stderr,none": 0.00012111103685625755,
						"mcc,none": 0.2564594180670322,
						"mcc_stderr,none": 0.02931129645363431
					},
					"kmmlu": {
						"acc,none": 0.26809125036095854,
						"acc_norm,none": 0.26809125036095854,
						"acc_norm_stderr,none": 0.02370839322187885,
						"acc_stderr,none": 0.02370839322187885,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5255426441569832,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.0004942605210420811,
						"acc_stderr,none": 0.04449362042964428,
						"alias": "kobest",
						"f1,none": 0.4258036260924094,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7173491170192121,
						"acc_stderr,none": 0.017960273438867913,
						"alias": "lambada",
						"perplexity,none": 3.5527309367220585,
						"perplexity_stderr,none": 0.18023249115775042
					},
					"lambada_cloze": {
						"acc,none": 0.07325829613817193,
						"acc_stderr,none": 0.01693182008029767,
						"alias": "lambada_cloze",
						"perplexity,none": 275.3272503131171,
						"perplexity_stderr,none": 14.615974728691885
					},
					"lambada_multilingual": {
						"acc,none": 0.5404618668736658,
						"acc_stderr,none": 0.08614738738024366,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.70104445804611,
						"perplexity_stderr,none": 8.145599087112915
					},
					"mmlu": {
						"acc,none": 0.4027204101979775,
						"acc_stderr,none": 0.08974617977748925,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.38299681190223167,
						"acc_stderr,none": 0.09480458154569965,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.45799806887672995,
						"acc_stderr,none": 0.08573981092379565,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4423139421514462,
						"acc_stderr,none": 0.07568430060929189,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3390421820488424,
						"acc_stderr,none": 0.06907425529787235,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3859474804826118,
						"acc_norm,none": 0.3579473066095681,
						"acc_norm_stderr,none": 0.0001259322984993744,
						"acc_stderr,none": 0.07762970450797438,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.48392857142857143,
						"acc_stderr,none": 0.05441299111545081,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7379255626280754,
						"acc_norm,none": 0.6465023477418784,
						"acc_norm_stderr,none": 0.010126208711978977,
						"acc_stderr,none": 0.15218464219889588,
						"alias": "pythia",
						"bits_per_byte,none": 0.6341800440795893,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520553883920474,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.224297722943878,
						"perplexity_stderr,none": 0.06237000241582455,
						"word_perplexity,none": 10.49220853700616,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.061734918442521006,
						"acc_stderr,none": 0.046485306100289836,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7086619413663439,
						"acc_stderr,none": 0.0756458965868456,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3300158527499425,
						"acc_stderr,none": 0.0015482860471839153,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.33414932680538556,
						"bleu_acc_stderr,none": 0.01651253067715054,
						"bleu_diff,none": -6.559723376123761,
						"bleu_diff_stderr,none": 0.8666634089985142,
						"bleu_max,none": 26.88877662179269,
						"bleu_max_stderr,none": 0.8183585280169915,
						"rouge1_acc,none": 0.31211750305997554,
						"rouge1_acc_stderr,none": 0.016220756769520926,
						"rouge1_diff,none": -8.866971028164613,
						"rouge1_diff_stderr,none": 0.9434867624483417,
						"rouge1_max,none": 51.764695566936396,
						"rouge1_max_stderr,none": 0.8856448263393588,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834548,
						"rouge2_diff,none": -10.268483887513367,
						"rouge2_diff_stderr,none": 1.1368533205269231,
						"rouge2_max,none": 35.71996352718539,
						"rouge2_max_stderr,none": 1.0370689023857738,
						"rougeL_acc,none": 0.3047735618115055,
						"rougeL_acc_stderr,none": 0.01611412415688246,
						"rougeL_diff,none": -9.0985564461349,
						"rougeL_diff_stderr,none": 0.9552199377800757,
						"rougeL_max,none": 48.98658975761845,
						"rougeL_max_stderr,none": 0.9055088952586435
					},
					"xcopa": {
						"acc,none": 0.6229090909090909,
						"acc_stderr,none": 0.07046092453758131,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43651941097724223,
						"acc_stderr,none": 0.049961388983869155,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6300463269358041,
						"acc_stderr,none": 0.06304309359131462,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8188356934142504,
						"acc_stderr,none": 0.037241161597448925,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6397970687711386,
						"acc_norm,none": 0.6426155580608793,
						"acc_norm_stderr,none": 0.08945925665657509,
						"acc_stderr,none": 0.10965744641532278,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.486875,
						"acc_stderr,none": 0.050166759138332295,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921665,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.437,
						"acc_stderr,none": 0.015693223928730377,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4425,
						"acc_stderr,none": 0.01434397375157324,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40784982935153585,
						"acc_norm,none": 0.4539249146757679,
						"acc_norm_stderr,none": 0.014549221105171858,
						"acc_stderr,none": 0.014361097288449696,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7542087542087542,
						"acc_norm,none": 0.7356902356902357,
						"acc_norm_stderr,none": 0.009048410451863012,
						"acc_stderr,none": 0.008834809366391487,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0594,
						"acc_stderr,none": 0.03316450083585226,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0515,
						"acc_stderr,none": 0.004943287675881571,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.1265,
						"acc_stderr,none": 0.0074348176343896335,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0645,
						"acc_stderr,none": 0.00549408477216554,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0215,
						"acc_stderr,none": 0.003244092641792835,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.023,
						"acc_stderr,none": 0.0033527780362380437,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.075,
						"acc_stderr,none": 0.005891082449449549,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.001929698647051984,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.131,
						"acc_stderr,none": 0.007546388141819297,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.004,
						"acc_stderr,none": 0.0014117352790976717,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0895,
						"acc_stderr,none": 0.006384767625279334,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004772234273318872,
						"acc_stderr,none": 0.0014357568013434012,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8135671641791045,
						"acc_stderr,none": 0.16051597921288527,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621231,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689093,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243787,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632168,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.00882342636694232,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366667,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.015667944488173505,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852432,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499366,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.00344497719409981,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919304,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406103,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666678,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525042,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323497,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499667,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.014566646394664392,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342763,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792932,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910639,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783236,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662157,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015098,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.01420569610409151,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745899,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.01001655286669685,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.00872852720607479,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499668,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.01494414023379502,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.00863212103213995,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.386,
						"acc_stderr,none": 0.015402637476784376,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.01551875741906654,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.0157633906404837,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024386,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621228,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333459,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491111,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938563,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.015512467135715075,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557421,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240804,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910613,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574864,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.441,
						"acc_stderr,none": 0.015708779894242676,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936713,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491125,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771302,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904619,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727172,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929454,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318241,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656803,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.369,
						"acc_stderr,none": 0.015266698139154617,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.014671272822977881,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7119266055045872,
						"acc_stderr,none": 0.007920666477161432,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.711183235164483,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29197622585438343,
						"acc_norm,none": 0.29197622585438343,
						"acc_norm_stderr,none": 0.12644343846028328,
						"acc_stderr,none": 0.12644343846028328,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5227272727272727,
						"acc_norm,none": 0.5227272727272727,
						"acc_norm_stderr,none": 0.07617047451458002,
						"acc_stderr,none": 0.07617047451458002,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.07275304578557182,
						"acc_stderr,none": 0.07275304578557182,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3179070972198239,
						"acc_norm,none": 0.3179070972198239,
						"acc_norm_stderr,none": 0.06129153851333873,
						"acc_stderr,none": 0.06129153851333873,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.037184093212853736,
						"acc_stderr,none": 0.037184093212853736,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.425,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03920394987159571,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3492822966507177,
						"acc_norm,none": 0.3492822966507177,
						"acc_norm_stderr,none": 0.03305620024300091,
						"acc_stderr,none": 0.03305620024300091,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3435114503816794,
						"acc_norm,none": 0.3435114503816794,
						"acc_norm_stderr,none": 0.041649760719448786,
						"acc_stderr,none": 0.041649760719448786,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3602941176470588,
						"acc_norm,none": 0.3602941176470588,
						"acc_norm_stderr,none": 0.041319197084091215,
						"acc_stderr,none": 0.041319197084091215,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3653250773993808,
						"acc_norm,none": 0.3653250773993808,
						"acc_norm_stderr,none": 0.02683412731746283,
						"acc_stderr,none": 0.02683412731746283,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115072,
						"acc_stderr,none": 0.03198001660115072,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.39106145251396646,
						"acc_norm,none": 0.39106145251396646,
						"acc_norm_stderr,none": 0.03657625502786071,
						"acc_stderr,none": 0.03657625502786071,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.048084723494299535,
						"acc_stderr,none": 0.048084723494299535,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809805,
						"acc_stderr,none": 0.039578354719809805,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.044392639061996274,
						"acc_stderr,none": 0.044392639061996274,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.02717645531875414,
						"acc_stderr,none": 0.02717645531875414,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.03332139944668086,
						"acc_stderr,none": 0.03332139944668086,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617517,
						"acc_stderr,none": 0.03814280082617517,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3836477987421384,
						"acc_norm,none": 0.3836477987421384,
						"acc_norm_stderr,none": 0.0386858628270552,
						"acc_stderr,none": 0.0386858628270552,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3619631901840491,
						"acc_norm,none": 0.3619631901840491,
						"acc_norm_stderr,none": 0.037757007291414416,
						"acc_stderr,none": 0.037757007291414416,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29797979797979796,
						"acc_norm,none": 0.29797979797979796,
						"acc_norm_stderr,none": 0.032586303838365555,
						"acc_stderr,none": 0.032586303838365555,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4495798319327731,
						"acc_norm,none": 0.4495798319327731,
						"acc_norm_stderr,none": 0.03231293497137707,
						"acc_stderr,none": 0.03231293497137707,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.039992628766177214,
						"acc_stderr,none": 0.039992628766177214,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3356643356643357,
						"acc_norm,none": 0.3356643356643357,
						"acc_norm_stderr,none": 0.03962800523347343,
						"acc_stderr,none": 0.03962800523347343,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3465909090909091,
						"acc_norm,none": 0.3465909090909091,
						"acc_norm_stderr,none": 0.0359734545643587,
						"acc_stderr,none": 0.0359734545643587,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261342,
						"acc_stderr,none": 0.04319782230261342,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.35664335664335667,
						"acc_norm,none": 0.35664335664335667,
						"acc_norm_stderr,none": 0.04019747669236479,
						"acc_stderr,none": 0.04019747669236479,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.04163453031302859,
						"acc_stderr,none": 0.04163453031302859,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.38372093023255816,
						"acc_norm,none": 0.38372093023255816,
						"acc_norm_stderr,none": 0.03718762118238794,
						"acc_stderr,none": 0.03718762118238794,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2846715328467153,
						"acc_norm,none": 0.2846715328467153,
						"acc_norm_stderr,none": 0.022286036929717288,
						"acc_stderr,none": 0.022286036929717288,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.46261682242990654,
						"acc_norm,none": 0.46261682242990654,
						"acc_norm_stderr,none": 0.03416354604102856,
						"acc_stderr,none": 0.03416354604102856,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3252032520325203,
						"acc_norm,none": 0.3252032520325203,
						"acc_norm_stderr,none": 0.042411537335732975,
						"acc_stderr,none": 0.042411537335732975,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.03283321069643156,
						"acc_stderr,none": 0.03283321069643156,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732731,
						"acc_stderr,none": 0.03460236918732731,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.042071607555840204,
						"acc_stderr,none": 0.042071607555840204,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003337,
						"acc_stderr,none": 0.03752833958003337,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977447,
						"acc_stderr,none": 0.04654465622977447,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290174,
						"acc_stderr,none": 0.034449526562290174,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780344,
						"acc_stderr,none": 0.030808291124780344,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03087984562096084,
						"acc_stderr,none": 0.03087984562096084,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03999262876617722,
						"acc_stderr,none": 0.03999262876617722,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3584070796460177,
						"acc_norm,none": 0.3584070796460177,
						"acc_norm_stderr,none": 0.03196883516493523,
						"acc_stderr,none": 0.03196883516493523,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3575757575757576,
						"acc_norm,none": 0.3575757575757576,
						"acc_norm_stderr,none": 0.037425970438065864,
						"acc_stderr,none": 0.037425970438065864,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002354,
						"acc_stderr,none": 0.03273943999002354,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.34911242603550297,
						"acc_norm,none": 0.34911242603550297,
						"acc_norm_stderr,none": 0.03677739827593944,
						"acc_stderr,none": 0.03677739827593944,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.38509316770186336,
						"acc_norm,none": 0.38509316770186336,
						"acc_norm_stderr,none": 0.038470477308191114,
						"acc_stderr,none": 0.038470477308191114,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03782614981812041,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2564594180670322,
						"mcc_stderr,none": 0.02931129645363431
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.036845294917747094,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.465908243887895,
						"likelihood_diff_stderr,none": 0.518455982928055,
						"pct_stereotype,none": 0.6131484794275492,
						"pct_stereotype_stderr,none": 0.0712074877798488
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.7239862850327965,
						"likelihood_diff_stderr,none": 0.08739774261202633,
						"pct_stereotype,none": 0.6440071556350626,
						"pct_stereotype_stderr,none": 0.01169577415693421
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.053571428571429,
						"likelihood_diff_stderr,none": 0.38411859595882814,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.965909090909091,
						"likelihood_diff_stderr,none": 1.8126834475417204,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.996153846153846,
						"likelihood_diff_stderr,none": 0.6121636710938511,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.052665630529342915
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.689453125,
						"likelihood_diff_stderr,none": 0.16542050291025195,
						"pct_stereotype,none": 0.63125,
						"pct_stereotype_stderr,none": 0.027012909806946844
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.6041666666666665,
						"likelihood_diff_stderr,none": 0.24320823923410229,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.9583333333333335,
						"likelihood_diff_stderr,none": 0.319569870593151,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5551181102362204,
						"likelihood_diff_stderr,none": 0.1534717195803001,
						"pct_stereotype,none": 0.5511811023622047,
						"pct_stereotype_stderr,none": 0.022089136921635943
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6768018018018016,
						"likelihood_diff_stderr,none": 0.34798055991241317,
						"pct_stereotype,none": 0.7027027027027027,
						"pct_stereotype_stderr,none": 0.04357977161242459
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.092741935483871,
						"likelihood_diff_stderr,none": 0.4375893318009899,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.234210526315789,
						"likelihood_diff_stderr,none": 0.2428371752160609,
						"pct_stereotype,none": 0.6842105263157895,
						"pct_stereotype_stderr,none": 0.03381137233892748
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.209190518783542,
						"likelihood_diff_stderr,none": 0.0743023652021914,
						"pct_stereotype,none": 0.5831842576028623,
						"pct_stereotype_stderr,none": 0.012043090376959054
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3680555555555554,
						"likelihood_diff_stderr,none": 0.31515483476845213,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05267171812666418
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8653846153846154,
						"likelihood_diff_stderr,none": 0.8663100464885207,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.795454545454546,
						"likelihood_diff_stderr,none": 0.4289871620064507,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.7340342679127727,
						"likelihood_diff_stderr,none": 0.1283738152999422,
						"pct_stereotype,none": 0.5825545171339563,
						"pct_stereotype_stderr,none": 0.027567233250091173
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.257905138339921,
						"likelihood_diff_stderr,none": 0.19590429511216012,
						"pct_stereotype,none": 0.4189723320158103,
						"pct_stereotype_stderr,none": 0.031080701217616472
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5,
						"likelihood_diff_stderr,none": 0.4359953446220285,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8603260869565217,
						"likelihood_diff_stderr,none": 0.1410006568697293,
						"pct_stereotype,none": 0.4956521739130435,
						"pct_stereotype_stderr,none": 0.023337119039688343
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.417391304347826,
						"likelihood_diff_stderr,none": 0.2758683976078567,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.57967032967033,
						"likelihood_diff_stderr,none": 0.31953776331882866,
						"pct_stereotype,none": 0.8241758241758241,
						"pct_stereotype_stderr,none": 0.040126194689023176
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.75765306122449,
						"likelihood_diff_stderr,none": 0.24498440879961514,
						"pct_stereotype,none": 0.7193877551020408,
						"pct_stereotype_stderr,none": 0.032174923577801474
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16338582677165353,
						"exact_match_stderr,none": 0.008203795712801942
					},
					"glue": {
						"acc,none": 0.7359010242972844,
						"acc_stderr,none": 0.004289797828827916,
						"alias": "glue",
						"f1,none": 0.7139513735431744,
						"f1_stderr,none": 0.00012111103685625755,
						"mcc,none": 0.2564594180670322,
						"mcc_stderr,none": 0.02931129645363431
					},
					"hellaswag": {
						"acc,none": 0.54052977494523,
						"acc_norm,none": 0.72814180442143,
						"acc_norm_stderr,none": 0.004440079173276974,
						"acc_stderr,none": 0.004973361339169649,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.26809125036095854,
						"acc_norm,none": 0.26809125036095854,
						"acc_norm_stderr,none": 0.02370839322187885,
						"acc_stderr,none": 0.02370839322187885,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234197,
						"acc_stderr,none": 0.013807775152234197,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01420569610409151,
						"acc_stderr,none": 0.01420569610409151,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895016,
						"acc_stderr,none": 0.013825416526895016,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25333333333333335,
						"acc_norm,none": 0.25333333333333335,
						"acc_norm_stderr,none": 0.017770356455067422,
						"acc_stderr,none": 0.017770356455067422,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996669,
						"acc_stderr,none": 0.014782913600996669,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481751,
						"acc_stderr,none": 0.014251810906481751,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.031999921482315785,
						"acc_stderr,none": 0.031999921482315785,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377937,
						"acc_stderr,none": 0.014370995982377937,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.17692307692307693,
						"acc_norm,none": 0.17692307692307693,
						"acc_norm_stderr,none": 0.03359832288347953,
						"acc_stderr,none": 0.03359832288347953,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.045126085985421255,
						"acc_stderr,none": 0.045126085985421255,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234194,
						"acc_stderr,none": 0.013807775152234194,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.01349300044693759,
						"acc_stderr,none": 0.01349300044693759,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021349,
						"acc_stderr,none": 0.013912208651021349,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.01386041525752791,
						"acc_stderr,none": 0.01386041525752791,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234183,
						"acc_stderr,none": 0.013807775152234183,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462623,
						"acc_stderr,none": 0.014078856992462623,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441271,
						"acc_stderr,none": 0.014399942998441271,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542126,
						"acc_stderr,none": 0.04512608598542126,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.01417451646148525,
						"acc_stderr,none": 0.01417451646148525,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633916,
						"acc_stderr,none": 0.014046255632633916,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888928,
						"acc_stderr,none": 0.013718133516888928,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750641,
						"acc_stderr,none": 0.013626065817750641,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.24833333333333332,
						"acc_norm,none": 0.24833333333333332,
						"acc_norm_stderr,none": 0.01765292774333302,
						"acc_stderr,none": 0.01765292774333302,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965143,
						"acc_stderr,none": 0.013895037677965143,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717597,
						"acc_stderr,none": 0.014095022868717597,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481753,
						"acc_stderr,none": 0.014251810906481753,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322674,
						"acc_stderr,none": 0.041633319989322674,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.025870931391123505,
						"acc_stderr,none": 0.025870931391123505,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965131,
						"acc_stderr,none": 0.013895037677965131,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986187,
						"acc_stderr,none": 0.014062601350986187,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.031093957143700265,
						"acc_stderr,none": 0.031093957143700265,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920854,
						"acc_stderr,none": 0.013512312258920854,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965131,
						"acc_stderr,none": 0.013895037677965131,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.014818724459095524,
						"acc_stderr,none": 0.014818724459095524,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5255426441569832,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.0004942605210420811,
						"acc_stderr,none": 0.04449362042964428,
						"alias": "kobest",
						"f1,none": 0.4258036260924094,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.013338890495620682,
						"alias": " - kobest_boolq",
						"f1,none": 0.37909766386888266,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.015292149942040577,
						"alias": " - kobest_copa",
						"f1,none": 0.626744368054134,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.436,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.022231970696321122,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.43071683801793426,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5188916876574308,
						"acc_stderr,none": 0.025108004284191587,
						"alias": " - kobest_sentineg",
						"f1,none": 0.38358547470592536,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3297230142969956,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7173491170192121,
						"acc_stderr,none": 0.017960273438867913,
						"alias": "lambada",
						"perplexity,none": 3.5527309367220585,
						"perplexity_stderr,none": 0.18023249115775042
					},
					"lambada_cloze": {
						"acc,none": 0.07325829613817193,
						"acc_stderr,none": 0.01693182008029767,
						"alias": "lambada_cloze",
						"perplexity,none": 275.3272503131171,
						"perplexity_stderr,none": 14.615974728691885
					},
					"lambada_multilingual": {
						"acc,none": 0.5404618668736658,
						"acc_stderr,none": 0.08614738738024366,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.70104445804611,
						"perplexity_stderr,none": 8.145599087112915
					},
					"lambada_openai": {
						"acc,none": 0.7514069474092762,
						"acc_stderr,none": 0.006021354126051953,
						"alias": " - lambada_openai",
						"perplexity,none": 3.224297722943878,
						"perplexity_stderr,none": 0.06237000241582455
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04017077430622938,
						"acc_stderr,none": 0.0027356728423199536,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 251.51660360637948,
						"perplexity_stderr,none": 7.687915773011564
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42907044440131964,
						"acc_stderr,none": 0.0068955297392451365,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.21724224159211,
						"perplexity_stderr,none": 1.9018042286689423
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7510188239860276,
						"acc_stderr,none": 0.006024496287103927,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.221960101154172,
						"perplexity_stderr,none": 0.06226645419707366
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45623908402872115,
						"acc_stderr,none": 0.006939246426049401,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.21438704704149,
						"perplexity_stderr,none": 1.3865996302823307
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5513293227246264,
						"acc_stderr,none": 0.006929173919665486,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.297727236834717,
						"perplexity_stderr,none": 0.788890663046697
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5146516592276343,
						"acc_stderr,none": 0.0069629862264550075,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.553905663608074,
						"perplexity_stderr,none": 1.140740287556998
					},
					"lambada_standard": {
						"acc,none": 0.6836794100523966,
						"acc_stderr,none": 0.006478911597012887,
						"alias": " - lambada_standard",
						"perplexity,none": 3.884305857253902,
						"perplexity_stderr,none": 0.07823650164467043
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.1063458179701145,
						"acc_stderr,none": 0.0042949392654540825,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 299.1378970198547,
						"perplexity_stderr,none": 9.201216229551031
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3326972010178117,
						"exact_match_stderr,get-answer": 0.011887703221215686
					},
					"logiqa": {
						"acc,none": 0.24731182795698925,
						"acc_norm,none": 0.28110599078341014,
						"acc_norm_stderr,none": 0.01763237462646,
						"acc_stderr,none": 0.016922842446712393,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25636132315521626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.0114491668492253,
						"acc_stderr,none": 0.011015878683092594,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25795644891122277,
						"acc_norm,none": 0.2639865996649916,
						"acc_norm_stderr,none": 0.0080692726944333,
						"acc_stderr,none": 0.008009187907885275,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4686507095954247,
						"acc_stderr,none": 0.005135771502312559,
						"alias": "mc_taco",
						"f1,none": 0.547569663630625,
						"f1_stderr,none": 0.0057099442971061155
					},
					"medmcqa": {
						"acc,none": 0.34783648099450154,
						"acc_norm,none": 0.34783648099450154,
						"acc_norm_stderr,none": 0.007365017800604737,
						"acc_stderr,none": 0.007365017800604737,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3762765121759623,
						"acc_norm,none": 0.3762765121759623,
						"acc_norm_stderr,none": 0.01358332364549917,
						"acc_stderr,none": 0.01358332364549917,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4027204101979775,
						"acc_stderr,none": 0.08974617977748925,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.43018867924528303,
						"acc_stderr,none": 0.030471445867183235,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.04076663253918567,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.4046242774566474,
						"acc_stderr,none": 0.03742461193887248,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562427,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.031489558297455304,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481404,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4206896551724138,
						"acc_stderr,none": 0.0411391498118926,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.023068188848261124,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.042163702135578345,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45161290322580644,
						"acc_stderr,none": 0.02831050034856839,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.03194740072265541,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5696969696969697,
						"acc_stderr,none": 0.03866225962879077,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.03540294377095368,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5440414507772021,
						"acc_stderr,none": 0.03594413711272434,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3384615384615385,
						"acc_stderr,none": 0.023991500500313036,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.028037929969114993,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36134453781512604,
						"acc_stderr,none": 0.031204691225150013,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.03479185572599661,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5284403669724771,
						"acc_stderr,none": 0.021402615697348044,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.029886910547626974,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5147058823529411,
						"acc_stderr,none": 0.035077938347913236,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5991561181434599,
						"acc_stderr,none": 0.031900803894732356,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4125560538116592,
						"acc_stderr,none": 0.03304062175449297,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5114503816793893,
						"acc_stderr,none": 0.043841400240780176,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.38299681190223167,
						"acc_stderr,none": 0.09480458154569965,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4380165289256198,
						"acc_stderr,none": 0.045291468044357915,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4722222222222222,
						"acc_stderr,none": 0.04826217294139894,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5145631067961165,
						"acc_stderr,none": 0.049486373240266356,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.030882736974138653,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5759897828863346,
						"acc_stderr,none": 0.017672263329084222,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.40173410404624277,
						"acc_stderr,none": 0.02639410417764363,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2201117318435754,
						"acc_stderr,none": 0.013856994024227175,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4150326797385621,
						"acc_stderr,none": 0.028213504177824093,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.45799806887672995,
						"acc_stderr,none": 0.08573981092379565,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5080385852090032,
						"acc_stderr,none": 0.028394421370984538,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.44135802469135804,
						"acc_stderr,none": 0.027628737155668773,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343954,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.333116036505867,
						"acc_stderr,none": 0.012037930451512054,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4007352941176471,
						"acc_stderr,none": 0.02976826352893311,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.41830065359477125,
						"acc_stderr,none": 0.019955975145835553,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3836734693877551,
						"acc_stderr,none": 0.03113088039623593,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4423139421514462,
						"acc_stderr,none": 0.07568430060929189,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.572139303482587,
						"acc_stderr,none": 0.03498541988407795,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3390421820488424,
						"acc_stderr,none": 0.06907425529787235,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.05016135580465919,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.0374005938202932,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6198830409356725,
						"acc_stderr,none": 0.037229657413855394,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7832908813041264,
						"acc_stderr,none": 0.004158886818976362,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7872253864930838,
						"acc_stderr,none": 0.004127723543714246,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7230392156862745,
						"acc_stderr,none": 0.02218159962295435,
						"alias": "mrpc",
						"f1,none": 0.8285280728376327,
						"f1_stderr,none": 0.01589868961048769
					},
					"multimedqa": {
						"acc,none": 0.3859474804826118,
						"acc_norm,none": 0.3579473066095681,
						"acc_norm_stderr,none": 0.0001259322984993744,
						"acc_stderr,none": 0.07762970450797438,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5076320132013201,
						"acc_stderr,none": 0.007180966411289075,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7033483838456479,
						"mrr_stderr,none": 0.010347830939142225,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41309255079006774,
						"r@2_stderr,none": 0.016551480902963107
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6548156524350358,
						"mrr_stderr,none": 0.01043635718948279,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.016773710557640358
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020514426225628036,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4395,
						"acc_stderr,none": 0.011100968009384218,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3735,
						"acc_stderr,none": 0.010819306988058637,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4265,
						"acc_stderr,none": 0.011061647934531042,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5135,
						"acc_stderr,none": 0.011179059024816817,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48392857142857143,
						"acc_stderr,none": 0.05441299111545081,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7758433079434167,
						"acc_norm,none": 0.7878128400435256,
						"acc_norm_stderr,none": 0.00953929982817406,
						"acc_stderr,none": 0.009729897956410041,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24439581554227155,
						"acc_norm,none": 0.28901579846285225,
						"acc_norm_stderr,none": 0.0033117985997640313,
						"acc_stderr,none": 0.003139548607636636,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.0208823404887618,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7379255626280754,
						"acc_norm,none": 0.6465023477418784,
						"acc_norm_stderr,none": 0.010126208711978977,
						"acc_stderr,none": 0.15218464219889588,
						"alias": "pythia",
						"bits_per_byte,none": 0.6341800440795893,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520553883920474,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.224297722943878,
						"perplexity_stderr,none": 0.06237000241582455,
						"word_perplexity,none": 10.49220853700616,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.061734918442521006,
						"acc_stderr,none": 0.046485306100289836,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.45625,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.037826149818120415,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3908450704225352,
						"acc_norm,none": 0.3908450704225352,
						"acc_norm_stderr,none": 0.029005007569909817,
						"acc_stderr,none": 0.029005007569909824,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7423942616868662,
						"acc_stderr,none": 0.002174946457349876,
						"alias": "qqp",
						"f1,none": 0.7129826108523714,
						"f1_stderr,none": 0.002690770781255905
					},
					"race": {
						"acc,none": 0.3435406698564593,
						"acc_stderr,none": 0.014697475413671396,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2694,
						"em_stderr,none": 0.004436702878630357,
						"f1,none": 0.2792938097715378,
						"f1_stderr,none": 0.004448154082596839
					},
					"rte": {
						"acc,none": 0.6823104693140795,
						"acc_stderr,none": 0.028024503562454613,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.957,
						"acc_norm,none": 0.959,
						"acc_norm_stderr,none": 0.006273624021118801,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8887614678899083,
						"acc_stderr,none": 0.010653962307071932,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5799760071978406,
						"acc_norm,none": 0.7711686494051785,
						"acc_norm_stderr,none": 0.0029700459400097026,
						"acc_stderr,none": 0.0034895769982513348,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7086619413663439,
						"acc_stderr,none": 0.0756458965868456,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.64453125,
						"acc_stderr,none": 0.0047906248992293735,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9100030404378231,
						"acc_stderr,none": 0.002881142805727033,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5766666666666667,
						"acc_stderr,none": 0.00489243271337591,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3300158527499425,
						"acc_stderr,none": 0.0015482860471839153,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.33414932680538556,
						"bleu_acc_stderr,none": 0.01651253067715054,
						"bleu_diff,none": -6.559723376123761,
						"bleu_diff_stderr,none": 0.8666634089985142,
						"bleu_max,none": 26.88877662179269,
						"bleu_max_stderr,none": 0.8183585280169915,
						"rouge1_acc,none": 0.31211750305997554,
						"rouge1_acc_stderr,none": 0.016220756769520926,
						"rouge1_diff,none": -8.866971028164613,
						"rouge1_diff_stderr,none": 0.9434867624483417,
						"rouge1_max,none": 51.764695566936396,
						"rouge1_max_stderr,none": 0.8856448263393588,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834548,
						"rouge2_diff,none": -10.268483887513367,
						"rouge2_diff_stderr,none": 1.1368533205269231,
						"rouge2_max,none": 35.71996352718539,
						"rouge2_max_stderr,none": 1.0370689023857738,
						"rougeL_acc,none": 0.3047735618115055,
						"rougeL_acc_stderr,none": 0.01611412415688246,
						"rougeL_diff,none": -9.0985564461349,
						"rougeL_diff_stderr,none": 0.9552199377800757,
						"rougeL_max,none": 48.98658975761845,
						"rougeL_max_stderr,none": 0.9055088952586435
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.33414932680538556,
						"bleu_acc_stderr,none": 0.01651253067715054,
						"bleu_diff,none": -6.559723376123761,
						"bleu_diff_stderr,none": 0.8666634089985142,
						"bleu_max,none": 26.88877662179269,
						"bleu_max_stderr,none": 0.8183585280169915,
						"rouge1_acc,none": 0.31211750305997554,
						"rouge1_acc_stderr,none": 0.016220756769520926,
						"rouge1_diff,none": -8.866971028164613,
						"rouge1_diff_stderr,none": 0.9434867624483417,
						"rouge1_max,none": 51.764695566936396,
						"rouge1_max_stderr,none": 0.8856448263393588,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834548,
						"rouge2_diff,none": -10.268483887513367,
						"rouge2_diff_stderr,none": 1.1368533205269231,
						"rouge2_max,none": 35.71996352718539,
						"rouge2_max_stderr,none": 1.0370689023857738,
						"rougeL_acc,none": 0.3047735618115055,
						"rougeL_acc_stderr,none": 0.01611412415688246,
						"rougeL_diff,none": -9.0985564461349,
						"rougeL_diff_stderr,none": 0.9552199377800757,
						"rougeL_max,none": 48.98658975761845,
						"rougeL_max_stderr,none": 0.9055088952586435
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25703794369645044,
						"acc_stderr,none": 0.015298077509485083,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40299376180343455,
						"acc_stderr,none": 0.014081455204992549,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.1643700787401575,
						"exact_match_stderr,none": 0.008223627138088828
					},
					"wic": {
						"acc,none": 0.5564263322884012,
						"acc_stderr,none": 0.01968416511809314,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6341800440795893,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520553883920474,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.49220853700616,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7245461720599842,
						"acc_stderr,none": 0.012555690055709534,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.05961305784972239,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.048346889526540184,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8717948717948718,
						"acc_stderr,none": 0.02027101064210495,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6229090909090909,
						"acc_stderr,none": 0.07046092453758131,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.02143471235607266,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209177,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43651941097724223,
						"acc_stderr,none": 0.049961388983869155,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512697,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.01001971582448348,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483482,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38032128514056224,
						"acc_stderr,none": 0.009730746464767608,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5301204819277109,
						"acc_stderr,none": 0.010003871419517736,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5028112449799197,
						"acc_stderr,none": 0.010021914455122174,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4971887550200803,
						"acc_stderr,none": 0.010021914455122181,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41244979919678715,
						"acc_stderr,none": 0.009867237678555588,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4847389558232932,
						"acc_stderr,none": 0.010017403508578984,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.00980622024667002,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41726907630522087,
						"acc_stderr,none": 0.00988393053751779,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44738955823293175,
						"acc_stderr,none": 0.009966439091407929,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41405622489959837,
						"acc_stderr,none": 0.009872910116421194,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40321285140562246,
						"acc_stderr,none": 0.00983251156086806,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3477911646586345,
						"acc_stderr,none": 0.009546411769843126,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6300463269358041,
						"acc_stderr,none": 0.06304309359131462,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5916611515552614,
						"acc_stderr,none": 0.012649064392162165,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7835870284579749,
						"acc_stderr,none": 0.010597338079182233,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7174056915949703,
						"acc_stderr,none": 0.011587123627044841,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5665122435473197,
						"acc_stderr,none": 0.012752771973917618,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252865,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6671078755790867,
						"acc_stderr,none": 0.012127221798743735,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481957,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840937,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293375,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.012670716290966718,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6366644606221046,
						"acc_stderr,none": 0.012377153306613275,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8188356934142504,
						"acc_stderr,none": 0.037241161597448925,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8791397849462366,
						"acc_stderr,none": 0.006761648439355467,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7518248175182481,
						"acc_stderr,none": 0.013955800392484948,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944934,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6888888888888889,
						"acc_stderr,none": 0.02612567541895451,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7896825396825397,
						"acc_stderr,none": 0.018171046497690278,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-rwkv-140_pth"
	},
	"./rwkv-x-dev/1_3-C0-rwkv-153_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.640924464487035,
						"acc_norm,none": 0.6417700112739572,
						"acc_norm_stderr,none": 0.08667959766514972,
						"acc_stderr,none": 0.10978513582736256,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4846875,
						"acc_stderr,none": 0.05245374982270536,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.06335,
						"acc_stderr,none": 0.035839493973076814,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8121641791044776,
						"acc_stderr,none": 0.16808198705321234,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2979197622585438,
						"acc_norm,none": 0.2979197622585438,
						"acc_norm_stderr,none": 0.1271422767965309,
						"acc_stderr,none": 0.1271422767965309,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3172163702296667,
						"acc_norm,none": 0.3172163702296667,
						"acc_norm_stderr,none": 0.06070264393546453,
						"acc_stderr,none": 0.06070264393546453,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.462600626118068,
						"likelihood_diff_stderr,none": 0.5183192018360147,
						"pct_stereotype,none": 0.6101669648181276,
						"pct_stereotype_stderr,none": 0.07122285281869169
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16190944881889763,
						"exact_match_stderr,none": 0.008173848946411315
					},
					"glue": {
						"acc,none": 0.7463226536445927,
						"acc_stderr,none": 0.004632295127397174,
						"alias": "glue",
						"f1,none": 0.7209308918912335,
						"f1_stderr,none": 0.00010119392461951488,
						"mcc,none": 0.23745213262842438,
						"mcc_stderr,none": 0.028667352713257672
					},
					"kmmlu": {
						"acc,none": 0.2706035229569737,
						"acc_norm,none": 0.2706035229569737,
						"acc_norm_stderr,none": 0.02538220750206898,
						"acc_stderr,none": 0.02538220750206898,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5235693926770445,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502988,
						"acc_stderr,none": 0.04361370704003251,
						"alias": "kobest",
						"f1,none": 0.42622419551047547,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7188045798563943,
						"acc_stderr,none": 0.01754842519651219,
						"alias": "lambada",
						"perplexity,none": 3.5378440042741315,
						"perplexity_stderr,none": 0.18132629044339282
					},
					"lambada_cloze": {
						"acc,none": 0.07151174073355326,
						"acc_stderr,none": 0.01626036115272726,
						"alias": "lambada_cloze",
						"perplexity,none": 282.2971794058502,
						"perplexity_stderr,none": 14.68966100916887
					},
					"lambada_multilingual": {
						"acc,none": 0.5398796817387929,
						"acc_stderr,none": 0.08647982620907459,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.60852524058511,
						"perplexity_stderr,none": 8.10816671904947
					},
					"mmlu": {
						"acc,none": 0.4014385415183022,
						"acc_stderr,none": 0.0897128824001038,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.38257173219978746,
						"acc_stderr,none": 0.09316585375119292,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.45767621499839073,
						"acc_stderr,none": 0.08692072405248119,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43971400714982123,
						"acc_stderr,none": 0.07899702446347236,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3368220742150333,
						"acc_stderr,none": 0.06654747144700947,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.38339247693399575,
						"acc_norm,none": 0.35711891269397855,
						"acc_norm_stderr,none": 0.00011988623747615252,
						"acc_stderr,none": 0.07896421243904415,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.485,
						"acc_stderr,none": 0.0545442559685253,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7368683907637892,
						"acc_norm,none": 0.6457009303760348,
						"acc_norm_stderr,none": 0.009661402616168815,
						"acc_stderr,none": 0.1580739554751721,
						"alias": "pythia",
						"bits_per_byte,none": 0.6341699828756252,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520445645582275,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2048805862208747,
						"perplexity_stderr,none": 0.06200538168257783,
						"word_perplexity,none": 10.491817262984204,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.06513447907567263,
						"acc_stderr,none": 0.04561195848693889,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7125220458553791,
						"acc_stderr,none": 0.07378718981280916,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32677678563506285,
						"acc_stderr,none": 0.0016085252726812085,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3329253365973072,
						"bleu_acc_stderr,none": 0.01649740238201205,
						"bleu_diff,none": -6.895256987188668,
						"bleu_diff_stderr,none": 0.8576771259880862,
						"bleu_max,none": 26.51497918105772,
						"bleu_max_stderr,none": 0.805806828659781,
						"rouge1_acc,none": 0.3011015911872705,
						"rouge1_acc_stderr,none": 0.01605899902610061,
						"rouge1_diff,none": -9.040128092055529,
						"rouge1_diff_stderr,none": 0.9376970064446629,
						"rouge1_max,none": 51.50410778909885,
						"rouge1_max_stderr,none": 0.8865691112640895,
						"rouge2_acc,none": 0.26193390452876375,
						"rouge2_acc_stderr,none": 0.015392118805015025,
						"rouge2_diff,none": -10.701498671702987,
						"rouge2_diff_stderr,none": 1.1273527724275378,
						"rouge2_max,none": 35.337287621187315,
						"rouge2_max_stderr,none": 1.0289730385388947,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.337238344121152,
						"rougeL_diff_stderr,none": 0.9506180235729325,
						"rougeL_max,none": 48.63327555013695,
						"rougeL_max_stderr,none": 0.9038885289700457
					},
					"xcopa": {
						"acc,none": 0.6234545454545455,
						"acc_stderr,none": 0.07090024977589493,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43745649263721553,
						"acc_stderr,none": 0.051978434113395235,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6298658323807231,
						"acc_stderr,none": 0.06288941167526697,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8199595414699933,
						"acc_stderr,none": 0.036530839797964496,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.640924464487035,
						"acc_norm,none": 0.6417700112739572,
						"acc_norm_stderr,none": 0.08667959766514972,
						"acc_stderr,none": 0.10978513582736256,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4846875,
						"acc_stderr,none": 0.05245374982270536,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.015543249100255544,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.01567663091218133,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.014326519674074474,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4087030716723549,
						"acc_norm,none": 0.4590443686006826,
						"acc_norm_stderr,none": 0.014562291073601224,
						"acc_stderr,none": 0.014365750345426998,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7554713804713805,
						"acc_norm,none": 0.7319023569023569,
						"acc_norm_stderr,none": 0.009089526578213698,
						"acc_stderr,none": 0.008819461106822593,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.06335,
						"acc_stderr,none": 0.035839493973076814,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.052,
						"acc_stderr,none": 0.004965916850399547,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.129,
						"acc_stderr,none": 0.007497173054018462,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0705,
						"acc_stderr,none": 0.005725492610493554,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0245,
						"acc_stderr,none": 0.0034577236625362353,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.02,
						"acc_stderr,none": 0.003131278085898063,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.097,
						"acc_stderr,none": 0.006619471935460818,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.001929698647051984,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.144,
						"acc_stderr,none": 0.007852568459791183,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0045,
						"acc_stderr,none": 0.001496995490223334,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0845,
						"acc_stderr,none": 0.006220870084827896,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004772234273318872,
						"acc_stderr,none": 0.0014357568013434012,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8121641791044776,
						"acc_stderr,none": 0.16808198705321234,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524272,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319409,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298133,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235232,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042092,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.577,
						"acc_stderr,none": 0.015630589090476345,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525049,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397353,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256562,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.0072129762946392326,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380719,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248092,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487902,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727082,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151094,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087981,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.01468399195108797,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612032,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474921,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823328,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178328,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783207,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662177,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072957,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523689,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783224,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229856,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.379,
						"acc_stderr,none": 0.015349091002225347,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.015768596914394382,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.00982000165134569,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462122,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598123,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011859,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142671,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412798,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162906,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919309,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397344,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178335,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411242,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.446,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110866,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745904,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858918,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.015815471195292682,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746832,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656803,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.362,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.317,
						"acc_stderr,none": 0.014721675438880224,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7103975535168195,
						"acc_stderr,none": 0.00793312653543031,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.6869845948696355,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2979197622585438,
						"acc_norm,none": 0.2979197622585438,
						"acc_norm_stderr,none": 0.1271422767965309,
						"acc_stderr,none": 0.1271422767965309,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.1008316903303367,
						"acc_stderr,none": 0.1008316903303367,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674225,
						"acc_stderr,none": 0.10729033533674225,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.07624928516630235,
						"acc_stderr,none": 0.07624928516630235,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.07275304578557182,
						"acc_stderr,none": 0.07275304578557182,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3172163702296667,
						"acc_norm,none": 0.3172163702296667,
						"acc_norm_stderr,none": 0.06070264393546453,
						"acc_stderr,none": 0.06070264393546453,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.43125,
						"acc_norm,none": 0.43125,
						"acc_norm_stderr,none": 0.03927594984018917,
						"acc_stderr,none": 0.03927594984018917,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3349282296650718,
						"acc_norm,none": 0.3349282296650718,
						"acc_norm_stderr,none": 0.03272491043051241,
						"acc_stderr,none": 0.03272491043051241,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.33587786259541985,
						"acc_norm,none": 0.33587786259541985,
						"acc_norm_stderr,none": 0.04142313771996663,
						"acc_stderr,none": 0.04142313771996663,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.04112975875177067,
						"acc_stderr,none": 0.04112975875177067,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3746130030959752,
						"acc_norm,none": 0.3746130030959752,
						"acc_norm_stderr,none": 0.026973537815032235,
						"acc_stderr,none": 0.026973537815032235,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647555,
						"acc_stderr,none": 0.03182231867647555,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3854748603351955,
						"acc_norm,none": 0.3854748603351955,
						"acc_norm_stderr,none": 0.036480254192943644,
						"acc_stderr,none": 0.036480254192943644,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4485981308411215,
						"acc_norm,none": 0.4485981308411215,
						"acc_norm_stderr,none": 0.04830698295619321,
						"acc_stderr,none": 0.04830698295619321,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852894,
						"acc_stderr,none": 0.04730439022852894,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049034,
						"acc_stderr,none": 0.041764667586049034,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.03354092437591518,
						"acc_stderr,none": 0.03354092437591518,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3129251700680272,
						"acc_norm,none": 0.3129251700680272,
						"acc_norm_stderr,none": 0.03837477482026868,
						"acc_stderr,none": 0.03837477482026868,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.0379400712153362,
						"acc_stderr,none": 0.0379400712153362,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.03856271073542805,
						"acc_stderr,none": 0.03856271073542805,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.36809815950920244,
						"acc_norm,none": 0.36809815950920244,
						"acc_norm_stderr,none": 0.03789213935838396,
						"acc_stderr,none": 0.03789213935838396,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.03549043982227173,
						"acc_stderr,none": 0.03549043982227173,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4495798319327731,
						"acc_norm,none": 0.4495798319327731,
						"acc_norm_stderr,none": 0.03231293497137707,
						"acc_stderr,none": 0.03231293497137707,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24782608695652175,
						"acc_norm,none": 0.24782608695652175,
						"acc_norm_stderr,none": 0.02853086259541007,
						"acc_stderr,none": 0.02853086259541007,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.04024778401977111,
						"acc_stderr,none": 0.04024778401977111,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.038970778815104114,
						"acc_stderr,none": 0.038970778815104114,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3465909090909091,
						"acc_norm,none": 0.3465909090909091,
						"acc_norm_stderr,none": 0.0359734545643587,
						"acc_stderr,none": 0.0359734545643587,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3288590604026846,
						"acc_norm,none": 0.3288590604026846,
						"acc_norm_stderr,none": 0.03861721178313762,
						"acc_stderr,none": 0.03861721178313762,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3050847457627119,
						"acc_norm,none": 0.3050847457627119,
						"acc_norm_stderr,none": 0.04256799926288004,
						"acc_stderr,none": 0.04256799926288004,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2865853658536585,
						"acc_norm,none": 0.2865853658536585,
						"acc_norm_stderr,none": 0.03541638332993505,
						"acc_stderr,none": 0.03541638332993505,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.34965034965034963,
						"acc_norm,none": 0.34965034965034963,
						"acc_norm_stderr,none": 0.04001716028382393,
						"acc_stderr,none": 0.04001716028382393,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.04134913018303316,
						"acc_stderr,none": 0.04134913018303316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37790697674418605,
						"acc_norm,none": 0.37790697674418605,
						"acc_norm_stderr,none": 0.03707849218723281,
						"acc_stderr,none": 0.03707849218723281,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2822384428223844,
						"acc_norm,none": 0.2822384428223844,
						"acc_norm_stderr,none": 0.02222830014542446,
						"acc_stderr,none": 0.02222830014542446,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4532710280373832,
						"acc_norm,none": 0.4532710280373832,
						"acc_norm_stderr,none": 0.03410948976343204,
						"acc_stderr,none": 0.03410948976343204,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3252032520325203,
						"acc_norm,none": 0.3252032520325203,
						"acc_norm_stderr,none": 0.042411537335732975,
						"acc_stderr,none": 0.042411537335732975,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.03283321069643155,
						"acc_stderr,none": 0.03283321069643155,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180183,
						"acc_stderr,none": 0.04244626443180183,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438015,
						"acc_stderr,none": 0.03780019230438015,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977446,
						"acc_stderr,none": 0.04654465622977446,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843724,
						"acc_stderr,none": 0.034645078898843724,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780323,
						"acc_stderr,none": 0.030808291124780323,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26595744680851063,
						"acc_norm,none": 0.26595744680851063,
						"acc_norm_stderr,none": 0.022816607010135295,
						"acc_stderr,none": 0.022816607010135295,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33620689655172414,
						"acc_norm,none": 0.33620689655172414,
						"acc_norm_stderr,none": 0.031082338581586128,
						"acc_stderr,none": 0.031082338581586128,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3505747126436782,
						"acc_norm,none": 0.3505747126436782,
						"acc_norm_stderr,none": 0.03627703962615275,
						"acc_stderr,none": 0.03627703962615275,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.039725528847851375,
						"acc_stderr,none": 0.039725528847851375,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.35398230088495575,
						"acc_norm,none": 0.35398230088495575,
						"acc_norm_stderr,none": 0.0318802503506933,
						"acc_stderr,none": 0.0318802503506933,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3575757575757576,
						"acc_norm,none": 0.3575757575757576,
						"acc_norm_stderr,none": 0.03742597043806586,
						"acc_stderr,none": 0.03742597043806586,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.33136094674556216,
						"acc_norm,none": 0.33136094674556216,
						"acc_norm_stderr,none": 0.0363154884408717,
						"acc_stderr,none": 0.0363154884408717,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.36645962732919257,
						"acc_norm,none": 0.36645962732919257,
						"acc_norm_stderr,none": 0.03809256561874491,
						"acc_stderr,none": 0.03809256561874491,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.35625,
						"acc_norm_stderr,none": 0.03797847267587851,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.23745213262842438,
						"mcc_stderr,none": 0.028667352713257672
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.036845294917747094,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.462600626118068,
						"likelihood_diff_stderr,none": 0.5183192018360147,
						"pct_stereotype,none": 0.6101669648181276,
						"pct_stereotype_stderr,none": 0.07122285281869169
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.711314847942755,
						"likelihood_diff_stderr,none": 0.08755211077275812,
						"pct_stereotype,none": 0.6446034585569469,
						"pct_stereotype_stderr,none": 0.01169138351745123
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.045329670329671,
						"likelihood_diff_stderr,none": 0.38865805570256967,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.045454545454546,
						"likelihood_diff_stderr,none": 1.8305544930610822,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.976923076923077,
						"likelihood_diff_stderr,none": 0.6078839765893511,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.052665630529342915
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.6890625,
						"likelihood_diff_stderr,none": 0.1674666885637471,
						"pct_stereotype,none": 0.628125,
						"pct_stereotype_stderr,none": 0.02705990013900488
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5775462962962963,
						"likelihood_diff_stderr,none": 0.24367874406423248,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.9635416666666665,
						"likelihood_diff_stderr,none": 0.319660123042424,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.560531496062992,
						"likelihood_diff_stderr,none": 0.1534016665584983,
						"pct_stereotype,none": 0.5590551181102362,
						"pct_stereotype_stderr,none": 0.02205034999632727
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6779279279279278,
						"likelihood_diff_stderr,none": 0.34568220286060974,
						"pct_stereotype,none": 0.6936936936936937,
						"pct_stereotype_stderr,none": 0.04395066997351522
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.076612903225806,
						"likelihood_diff_stderr,none": 0.44481270886025137,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.226315789473684,
						"likelihood_diff_stderr,none": 0.24374232751758126,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.2100849731663685,
						"likelihood_diff_stderr,none": 0.07458381456263273,
						"pct_stereotype,none": 0.5760286225402504,
						"pct_stereotype_stderr,none": 0.01207127916836165
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3569444444444443,
						"likelihood_diff_stderr,none": 0.31587942450679546,
						"pct_stereotype,none": 0.5777777777777777,
						"pct_stereotype_stderr,none": 0.05235473399540657
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.9423076923076925,
						"likelihood_diff_stderr,none": 0.9010116492645661,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.8522727272727275,
						"likelihood_diff_stderr,none": 0.4350594523290097,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.7110591900311527,
						"likelihood_diff_stderr,none": 0.12920005685430047,
						"pct_stereotype,none": 0.5700934579439252,
						"pct_stereotype_stderr,none": 0.027674836672590703
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.277667984189723,
						"likelihood_diff_stderr,none": 0.1947277158157939,
						"pct_stereotype,none": 0.41106719367588934,
						"pct_stereotype_stderr,none": 0.030994812415369746
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4791666666666665,
						"likelihood_diff_stderr,none": 0.4358761502859574,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8630434782608694,
						"likelihood_diff_stderr,none": 0.14075327362057266,
						"pct_stereotype,none": 0.4934782608695652,
						"pct_stereotype_stderr,none": 0.023336016041798566
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4315217391304347,
						"likelihood_diff_stderr,none": 0.2783815542660466,
						"pct_stereotype,none": 0.7565217391304347,
						"pct_stereotype_stderr,none": 0.04019651260878071
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.5604395604395602,
						"likelihood_diff_stderr,none": 0.3173440956130456,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7745535714285716,
						"likelihood_diff_stderr,none": 0.24543990596641616,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.032350772404131325
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16190944881889763,
						"exact_match_stderr,none": 0.008173848946411315
					},
					"glue": {
						"acc,none": 0.7463226536445927,
						"acc_stderr,none": 0.004632295127397174,
						"alias": "glue",
						"f1,none": 0.7209308918912335,
						"f1_stderr,none": 0.00010119392461951488,
						"mcc,none": 0.23745213262842438,
						"mcc_stderr,none": 0.028667352713257672
					},
					"hellaswag": {
						"acc,none": 0.5411272654849631,
						"acc_norm,none": 0.7298346942840072,
						"acc_norm_stderr,none": 0.004431375549911357,
						"acc_stderr,none": 0.004972872811662292,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2706035229569737,
						"acc_norm,none": 0.2706035229569737,
						"acc_norm_stderr,none": 0.02538220750206898,
						"acc_stderr,none": 0.02538220750206898,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729486,
						"acc_stderr,none": 0.014013292702729486,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717607,
						"acc_stderr,none": 0.014095022868717607,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.01364467578131413,
						"acc_stderr,none": 0.01364467578131413,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514515,
						"acc_stderr,none": 0.01397996564514515,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26166666666666666,
						"acc_norm,none": 0.26166666666666666,
						"acc_norm_stderr,none": 0.01795920168731842,
						"acc_stderr,none": 0.01795920168731842,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717597,
						"acc_stderr,none": 0.014095022868717597,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738859,
						"acc_stderr,none": 0.014806864733738859,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377939,
						"acc_stderr,none": 0.014370995982377939,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.032166339033750324,
						"acc_stderr,none": 0.032166339033750324,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206484,
						"acc_stderr,none": 0.014619600977206484,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.04461960433384741,
						"acc_stderr,none": 0.04461960433384741,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102136,
						"acc_stderr,none": 0.01391220865102136,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.01442855443844552,
						"acc_stderr,none": 0.01442855443844552,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.01364467578131413,
						"acc_stderr,none": 0.01364467578131413,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145153,
						"acc_stderr,none": 0.013979965645145153,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796275,
						"acc_stderr,none": 0.013996674851796275,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177453,
						"acc_stderr,none": 0.013569640199177453,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145155,
						"acc_stderr,none": 0.013979965645145155,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768078,
						"acc_stderr,none": 0.04408440022768078,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462625,
						"acc_stderr,none": 0.014078856992462625,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612028,
						"acc_stderr,none": 0.014190150117612028,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434948,
						"acc_stderr,none": 0.014221154708434948,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168537,
						"acc_stderr,none": 0.013772206565168537,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.01396316475480995,
						"acc_stderr,none": 0.01396316475480995,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.01364467578131412,
						"acc_stderr,none": 0.01364467578131412,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.017532332270077985,
						"acc_stderr,none": 0.017532332270077985,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314125,
						"acc_stderr,none": 0.013644675781314125,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.01419015011761203,
						"acc_stderr,none": 0.01419015011761203,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895033,
						"acc_stderr,none": 0.013825416526895033,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031309,
						"acc_stderr,none": 0.014267009061031309,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.0258709313911235,
						"acc_stderr,none": 0.0258709313911235,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200489,
						"acc_stderr,none": 0.014282120955200489,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073037,
						"acc_stderr,none": 0.030275120389073037,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577948,
						"acc_stderr,none": 0.013454070462577948,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729493,
						"acc_stderr,none": 0.014013292702729493,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.03048807329211421,
						"acc_stderr,none": 0.03048807329211421,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.323,
						"acc_norm,none": 0.323,
						"acc_norm_stderr,none": 0.014794927843348637,
						"acc_stderr,none": 0.014794927843348637,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5235693926770445,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502988,
						"acc_stderr,none": 0.04361370704003251,
						"alias": "kobest",
						"f1,none": 0.42622419551047547,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5235042735042735,
						"acc_stderr,none": 0.013334010264781466,
						"alias": " - kobest_boolq",
						"f1,none": 0.388864200502949,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": " - kobest_copa",
						"f1,none": 0.6174817852054806,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.426,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.022221331534143015,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42041423225009894,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5163727959697733,
						"acc_stderr,none": 0.02511247082204795,
						"alias": " - kobest_sentineg",
						"f1,none": 0.38588462777956817,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3310778727445394,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7188045798563943,
						"acc_stderr,none": 0.01754842519651219,
						"alias": "lambada",
						"perplexity,none": 3.5378440042741315,
						"perplexity_stderr,none": 0.18132629044339282
					},
					"lambada_cloze": {
						"acc,none": 0.07151174073355326,
						"acc_stderr,none": 0.01626036115272726,
						"alias": "lambada_cloze",
						"perplexity,none": 282.2971794058502,
						"perplexity_stderr,none": 14.68966100916887
					},
					"lambada_multilingual": {
						"acc,none": 0.5398796817387929,
						"acc_stderr,none": 0.08647982620907459,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.60852524058511,
						"perplexity_stderr,none": 8.10816671904947
					},
					"lambada_openai": {
						"acc,none": 0.7523772559673977,
						"acc_stderr,none": 0.0060134702680132044,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2048805862208747,
						"perplexity_stderr,none": 0.06200538168257783
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03978265088298079,
						"acc_stderr,none": 0.0027229753280860573,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 258.7086522436224,
						"perplexity_stderr,none": 7.971346143292533
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42907044440131964,
						"acc_stderr,none": 0.006895529739245136,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.07480537186791,
						"perplexity_stderr,none": 1.894506228545802
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7517950708325247,
						"acc_stderr,none": 0.006018205466012235,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.204724358901203,
						"perplexity_stderr,none": 0.06187653492310424
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45507471375897535,
						"acc_stderr,none": 0.0069378020701983565,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.0604025888277,
						"perplexity_stderr,none": 1.3810379145474596
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5490005821851349,
						"acc_stderr,none": 0.006932445530803898,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.27928162588669,
						"perplexity_stderr,none": 0.7892873494097484
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5144575975160101,
						"acc_stderr,none": 0.006963064961328419,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.423412257442042,
						"perplexity_stderr,none": 1.134096107736529
					},
					"lambada_standard": {
						"acc,none": 0.6860081505918882,
						"acc_stderr,none": 0.006466002944831248,
						"alias": " - lambada_standard",
						"perplexity,none": 3.8720949114695222,
						"perplexity_stderr,none": 0.07788154787735543
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.10324083058412575,
						"acc_stderr,none": 0.004239120196418171,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 305.885706568078,
						"perplexity_stderr,none": 9.476734598638156
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.339058524173028,
						"exact_match_stderr,get-answer": 0.011943476079102577
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.016887410894296923,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.0114491668492253,
						"acc_stderr,none": 0.011060275310259944,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.26566164154103855,
						"acc_norm_stderr,none": 0.008085616216226048,
						"acc_stderr,none": 0.008029434758777931,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.46324931158652827,
						"acc_stderr,none": 0.0051319769552941406,
						"alias": "mc_taco",
						"f1,none": 0.545389307499103,
						"f1_stderr,none": 0.005702899454502946
					},
					"medmcqa": {
						"acc,none": 0.34592397800621566,
						"acc_norm,none": 0.34592397800621566,
						"acc_norm_stderr,none": 0.00735550395376213,
						"acc_stderr,none": 0.00735550395376213,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.373134328358209,
						"acc_norm,none": 0.373134328358209,
						"acc_norm_stderr,none": 0.013560518364022974,
						"acc_stderr,none": 0.013560518364022974,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4014385415183022,
						"acc_stderr,none": 0.0897128824001038,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749193,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.42641509433962266,
						"acc_stderr,none": 0.030437794342983045,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3611111111111111,
						"acc_stderr,none": 0.040166600304512336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.35319148936170214,
						"acc_stderr,none": 0.031245325202761926,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04082482904638628,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.023636975996101813,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.04240799327574924,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252603,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.44516129032258067,
						"acc_stderr,none": 0.02827241018621491,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.30049261083743845,
						"acc_stderr,none": 0.03225799476233485,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5636363636363636,
						"acc_stderr,none": 0.03872592983524754,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.43434343434343436,
						"acc_stderr,none": 0.03531505879359183,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5544041450777202,
						"acc_stderr,none": 0.03587014986075661,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3384615384615385,
						"acc_stderr,none": 0.023991500500313036,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.028037929969114993,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3445378151260504,
						"acc_stderr,none": 0.030868682604121626,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.033742355504256936,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5211009174311927,
						"acc_stderr,none": 0.021418224754264636,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.030058202704309846,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4950980392156863,
						"acc_stderr,none": 0.03509143375606789,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5991561181434599,
						"acc_stderr,none": 0.031900803894732356,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.42152466367713004,
						"acc_stderr,none": 0.033141902221106564,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.04385162325601553,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.38257173219978746,
						"acc_stderr,none": 0.09316585375119292,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.04545454545454546,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.048129173245368216,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5048543689320388,
						"acc_stderr,none": 0.04950504382128921,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6581196581196581,
						"acc_stderr,none": 0.03107502852650776,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5925925925925926,
						"acc_stderr,none": 0.017570705239256555,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.407514450867052,
						"acc_stderr,none": 0.0264545781469315,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21899441340782122,
						"acc_stderr,none": 0.013831676687303205,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.40522875816993464,
						"acc_stderr,none": 0.028110928492809075,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.45767621499839073,
						"acc_stderr,none": 0.08692072405248119,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4983922829581994,
						"acc_stderr,none": 0.02839794490780661,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4506172839506173,
						"acc_stderr,none": 0.027684721415656203,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2907801418439716,
						"acc_stderr,none": 0.027090664368353178,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3324641460234681,
						"acc_stderr,none": 0.012032022332260512,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3860294117647059,
						"acc_stderr,none": 0.029573269134411124,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.41830065359477125,
						"acc_stderr,none": 0.01995597514583555,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.42727272727272725,
						"acc_stderr,none": 0.04738198703545483,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3877551020408163,
						"acc_stderr,none": 0.031192230726795656,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43971400714982123,
						"acc_stderr,none": 0.07899702446347236,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5920398009950248,
						"acc_stderr,none": 0.03475116365194092,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3368220742150333,
						"acc_stderr,none": 0.06654747144700947,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.05016135580465919,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.631578947368421,
						"acc_stderr,none": 0.036996580176568775,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7927661742231279,
						"acc_stderr,none": 0.004091474522611672,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7985150528885273,
						"acc_stderr,none": 0.004045423644883571,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7156862745098039,
						"acc_stderr,none": 0.022359549679883527,
						"alias": "mrpc",
						"f1,none": 0.824773413897281,
						"f1_stderr,none": 0.016054229800404586
					},
					"multimedqa": {
						"acc,none": 0.38339247693399575,
						"acc_norm,none": 0.35711891269397855,
						"acc_norm_stderr,none": 0.00011988623747615252,
						"acc_stderr,none": 0.07896421243904415,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5037128712871287,
						"acc_stderr,none": 0.0071816050950067245,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7052294973864244,
						"mrr_stderr,none": 0.010332388523218551,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4164785553047404,
						"r@2_stderr,none": 0.01657116712766196
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6560383762987688,
						"mrr_stderr,none": 0.0104442068148811,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750955,
						"r@2,none": 0.4650112866817156,
						"r@2_stderr,none": 0.016766114263692605
					},
					"openbookqa": {
						"acc,none": 0.296,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.022080014812228137,
						"acc_stderr,none": 0.020435342091896146,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.437,
						"acc_stderr,none": 0.01109400912741898,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.010878012942757032,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654281,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003715,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.0111781024770528,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.485,
						"acc_stderr,none": 0.0545442559685253,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7758433079434167,
						"acc_norm,none": 0.7867247007616975,
						"acc_norm_stderr,none": 0.009557121225861338,
						"acc_stderr,none": 0.009729897956410029,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24519641332194705,
						"acc_norm,none": 0.29077711357813835,
						"acc_norm_stderr,none": 0.0033177574475504483,
						"acc_stderr,none": 0.0031430202957432713,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.682,
						"acc_stderr,none": 0.020847571620814007,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7368683907637892,
						"acc_norm,none": 0.6457009303760348,
						"acc_norm_stderr,none": 0.009661402616168815,
						"acc_stderr,none": 0.1580739554751721,
						"alias": "pythia",
						"bits_per_byte,none": 0.6341699828756252,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520445645582275,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2048805862208747,
						"perplexity_stderr,none": 0.06200538168257783,
						"word_perplexity,none": 10.491817262984204,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.06513447907567263,
						"acc_stderr,none": 0.04561195848693889,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04567549854280212,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.45625,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.0378261498181204,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.02890817768804618,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7546376453128865,
						"acc_stderr,none": 0.0021400636920511354,
						"alias": "qqp",
						"f1,none": 0.7200112898673441,
						"f1_stderr,none": 0.0026961754349262303
					},
					"race": {
						"acc,none": 0.34258373205741627,
						"acc_stderr,none": 0.01468768473714516,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2664,
						"em_stderr,none": 0.004420979360486899,
						"f1,none": 0.27581714309453964,
						"f1_stderr,none": 0.004432739203787763
					},
					"rte": {
						"acc,none": 0.6859205776173285,
						"acc_stderr,none": 0.027938437681209072,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.96,
						"acc_norm,none": 0.961,
						"acc_norm_stderr,none": 0.006125072776426136,
						"acc_stderr,none": 0.006199874066337061,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6823104693140795,
						"acc_stderr,none": 0.028024503562454613,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.01120598290257748,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5813755873238029,
						"acc_norm,none": 0.7713186044186744,
						"acc_norm_stderr,none": 0.0029693612896043147,
						"acc_stderr,none": 0.003487959177596885,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7125220458553791,
						"acc_stderr,none": 0.07378718981280916,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6488381410256411,
						"acc_stderr,none": 0.004777396860519391,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9088882132360393,
						"acc_stderr,none": 0.0028971565408798896,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5849019607843138,
						"acc_stderr,none": 0.00487908162457488,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32677678563506285,
						"acc_stderr,none": 0.0016085252726812085,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3329253365973072,
						"bleu_acc_stderr,none": 0.01649740238201205,
						"bleu_diff,none": -6.895256987188668,
						"bleu_diff_stderr,none": 0.8576771259880862,
						"bleu_max,none": 26.51497918105772,
						"bleu_max_stderr,none": 0.805806828659781,
						"rouge1_acc,none": 0.3011015911872705,
						"rouge1_acc_stderr,none": 0.01605899902610061,
						"rouge1_diff,none": -9.040128092055529,
						"rouge1_diff_stderr,none": 0.9376970064446629,
						"rouge1_max,none": 51.50410778909885,
						"rouge1_max_stderr,none": 0.8865691112640895,
						"rouge2_acc,none": 0.26193390452876375,
						"rouge2_acc_stderr,none": 0.015392118805015025,
						"rouge2_diff,none": -10.701498671702987,
						"rouge2_diff_stderr,none": 1.1273527724275378,
						"rouge2_max,none": 35.337287621187315,
						"rouge2_max_stderr,none": 1.0289730385388947,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.337238344121152,
						"rougeL_diff_stderr,none": 0.9506180235729325,
						"rougeL_max,none": 48.63327555013695,
						"rougeL_max_stderr,none": 0.9038885289700457
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3329253365973072,
						"bleu_acc_stderr,none": 0.01649740238201205,
						"bleu_diff,none": -6.895256987188668,
						"bleu_diff_stderr,none": 0.8576771259880862,
						"bleu_max,none": 26.51497918105772,
						"bleu_max_stderr,none": 0.805806828659781,
						"rouge1_acc,none": 0.3011015911872705,
						"rouge1_acc_stderr,none": 0.01605899902610061,
						"rouge1_diff,none": -9.040128092055529,
						"rouge1_diff_stderr,none": 0.9376970064446629,
						"rouge1_max,none": 51.50410778909885,
						"rouge1_max_stderr,none": 0.8865691112640895,
						"rouge2_acc,none": 0.26193390452876375,
						"rouge2_acc_stderr,none": 0.015392118805015025,
						"rouge2_diff,none": -10.701498671702987,
						"rouge2_diff_stderr,none": 1.1273527724275378,
						"rouge2_max,none": 35.337287621187315,
						"rouge2_max_stderr,none": 1.0289730385388947,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.337238344121152,
						"rougeL_diff_stderr,none": 0.9506180235729325,
						"rougeL_max,none": 48.63327555013695,
						"rougeL_max_stderr,none": 0.9038885289700457
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2521419828641371,
						"acc_stderr,none": 0.015201522246299969,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4014115884059886,
						"acc_stderr,none": 0.014118973029441397,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.16043307086614172,
						"exact_match_stderr,none": 0.00814366027519534
					},
					"wic": {
						"acc,none": 0.5658307210031348,
						"acc_stderr,none": 0.019638263845456132,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6341811619911407,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5520565910449091,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.492252012798215,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7205998421468035,
						"acc_stderr,none": 0.01261082653940468,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.04834688952654019,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8681318681318682,
						"acc_stderr,none": 0.020515321360773595,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6234545454545455,
						"acc_stderr,none": 0.07090024977589493,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689253,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.020271503835075217,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.02143471235607266,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43745649263721553,
						"acc_stderr,none": 0.051978434113395235,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409708,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4875502008032129,
						"acc_stderr,none": 0.010018965593055396,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3823293172690763,
						"acc_stderr,none": 0.009740580649033704,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5313253012048192,
						"acc_stderr,none": 0.010002384719762126,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5052208835341365,
						"acc_stderr,none": 0.010021526496530328,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.010021956483068086,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42650602409638555,
						"acc_stderr,none": 0.009913215943570534,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4875502008032129,
						"acc_stderr,none": 0.010018965593055396,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40120481927710844,
						"acc_stderr,none": 0.009824484469158961,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41606425702811245,
						"acc_stderr,none": 0.009879848511479756,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.00997304277481168,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480246,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467443,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3465863453815261,
						"acc_stderr,none": 0.009538660220458996,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6298658323807231,
						"acc_stderr,none": 0.06288941167526697,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7835870284579749,
						"acc_stderr,none": 0.010597338079182233,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7174056915949703,
						"acc_stderr,none": 0.011587123627044841,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5698213103904699,
						"acc_stderr,none": 0.012741052817471078,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703516,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6664460622104567,
						"acc_stderr,none": 0.012133247747835347,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.01281867645248196,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6843150231634679,
						"acc_stderr,none": 0.011960973299680223,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.012795688167385296,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.012661578894368943,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399374,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8199595414699933,
						"acc_stderr,none": 0.036530839797964496,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8795698924731182,
						"acc_stderr,none": 0.006751257189226948,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7507820646506778,
						"acc_stderr,none": 0.013975386806002533,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.02597659935230537,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7916666666666666,
						"acc_stderr,none": 0.018107836663152053,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-rwkv-153_pth"
	},
	"./rwkv-x-dev/1_3-C0-rwkv-60_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.6423337091319054,
						"acc_norm_stderr,none": 0.09290914990179311,
						"acc_stderr,none": 0.11047882924264471,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4784375,
						"acc_stderr,none": 0.04784450252356575,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0553,
						"acc_stderr,none": 0.02974595700317713,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.814910447761194,
						"acc_stderr,none": 0.1594524259922161,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.28454680534918286,
						"acc_norm,none": 0.28454680534918286,
						"acc_norm_stderr,none": 0.12546982090493775,
						"acc_stderr,none": 0.12546982090493775,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.30832326023139356,
						"acc_norm,none": 0.30832326023139356,
						"acc_norm_stderr,none": 0.056982837727874165,
						"acc_stderr,none": 0.056982837727874165,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.461594364937388,
						"likelihood_diff_stderr,none": 0.5218435025554806,
						"pct_stereotype,none": 0.618515205724508,
						"pct_stereotype_stderr,none": 0.06779316747220007
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16338582677165353,
						"exact_match_stderr,none": 0.008203795712801939
					},
					"glue": {
						"acc,none": 0.7373864602217259,
						"acc_stderr,none": 0.048159579738364676,
						"alias": "glue",
						"f1,none": 0.7165164305105304,
						"f1_stderr,none": 0.00011733386549926985,
						"mcc,none": 0.2852371922144055,
						"mcc_stderr,none": 0.0009546574841831274
					},
					"kmmlu": {
						"acc,none": 0.25275772451631534,
						"acc_norm,none": 0.25275772451631534,
						"acc_norm_stderr,none": 0.02291604840591558,
						"acc_stderr,none": 0.02291604840591558,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5255426441569832,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.00049329859719439,
						"acc_stderr,none": 0.041502050958988776,
						"alias": "kobest",
						"f1,none": 0.4299505805385205,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.717931302154085,
						"acc_stderr,none": 0.01759584188685439,
						"alias": "lambada",
						"perplexity,none": 3.58186934985286,
						"perplexity_stderr,none": 0.1937821012020122
					},
					"lambada_cloze": {
						"acc,none": 0.09198525130991654,
						"acc_stderr,none": 0.02181135934210522,
						"alias": "lambada_cloze",
						"perplexity,none": 243.3650121744029,
						"perplexity_stderr,none": 10.131344141982527
					},
					"lambada_multilingual": {
						"acc,none": 0.5397632447118182,
						"acc_stderr,none": 0.08583436346519245,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.7460725050401,
						"perplexity_stderr,none": 8.187793871443906
					},
					"mmlu": {
						"acc,none": 0.3994445235721407,
						"acc_stderr,none": 0.08664638523251636,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3851222104144527,
						"acc_stderr,none": 0.09155979451522625,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4431927904731252,
						"acc_stderr,none": 0.08502739498941352,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43646408839779005,
						"acc_stderr,none": 0.07665101748216732,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34157944814462415,
						"acc_stderr,none": 0.06629790429175356,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.37388218594748046,
						"acc_norm,none": 0.34701639564631104,
						"acc_norm_stderr,none": 0.00011838282028775971,
						"acc_stderr,none": 0.08381629994028411,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4878571428571429,
						"acc_stderr,none": 0.05503982466052629,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7379891040256733,
						"acc_norm,none": 0.6461421112326158,
						"acc_norm_stderr,none": 0.010730998874619925,
						"acc_stderr,none": 0.15148417722863466,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339385751844542,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517956372128914,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2237325105024204,
						"perplexity_stderr,none": 0.06224185288795293,
						"word_perplexity,none": 10.482821986587334,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.40602836879432624,
						"acc_norm,none": 0.45390070921985815,
						"acc_norm_stderr,none": 0.06494692189816992,
						"acc_stderr,none": 0.053005012134036765,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6967821370337094,
						"acc_stderr,none": 0.08088910421587378,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3252388130954996,
						"acc_stderr,none": 0.0015949854656307676,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -6.618048378622622,
						"bleu_diff_stderr,none": 0.8727421935974016,
						"bleu_max,none": 26.642436123429142,
						"bleu_max_stderr,none": 0.8112225080728003,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.712637578236393,
						"rouge1_diff_stderr,none": 0.9613030118003614,
						"rouge1_max,none": 51.56613129898997,
						"rouge1_max_stderr,none": 0.8854552898464716,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237012,
						"rouge2_diff,none": -10.299603055500173,
						"rouge2_diff_stderr,none": 1.14203323777167,
						"rouge2_max,none": 35.408678090010596,
						"rouge2_max_stderr,none": 1.037251729790366,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -9.107939695628595,
						"rougeL_diff_stderr,none": 0.9735156072604116,
						"rougeL_max,none": 48.746102374106314,
						"rougeL_max_stderr,none": 0.9030391009263778
					},
					"xcopa": {
						"acc,none": 0.6236363636363635,
						"acc_stderr,none": 0.0719421819722929,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43593038821954483,
						"acc_stderr,none": 0.04792776487499466,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6304674808976596,
						"acc_stderr,none": 0.062016620175044196,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8186109238031019,
						"acc_stderr,none": 0.035284653916876206,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.6423337091319054,
						"acc_norm_stderr,none": 0.09290914990179311,
						"acc_stderr,none": 0.11047882924264471,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4784375,
						"acc_stderr,none": 0.04784450252356575,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405186,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.0157161699532041,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4266666666666667,
						"acc_stderr,none": 0.014283622442395878,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40102389078498296,
						"acc_norm,none": 0.4462457337883959,
						"acc_norm_stderr,none": 0.014526705548539982,
						"acc_stderr,none": 0.014322255790719867,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.75,
						"acc_norm,none": 0.7390572390572391,
						"acc_norm_stderr,none": 0.009011142493235973,
						"acc_stderr,none": 0.008885233166386385,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0553,
						"acc_stderr,none": 0.02974595700317713,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.068,
						"acc_stderr,none": 0.005630617366325301,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.1215,
						"acc_stderr,none": 0.00730722743497089,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0715,
						"acc_stderr,none": 0.005762853480708971,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.023,
						"acc_stderr,none": 0.003352778036238045,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.021,
						"acc_stderr,none": 0.0032069677767574438,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0625,
						"acc_stderr,none": 0.0054140124459944395,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.001929698647051984,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0995,
						"acc_stderr,none": 0.006694944820016821,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275444,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.076,
						"acc_stderr,none": 0.0059270198905006415,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006073752711496746,
						"acc_stderr,none": 0.0016186926522842625,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.814910447761194,
						"acc_stderr,none": 0.1594524259922161,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248118,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319416,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243787,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799461,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.00853415677333344,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113121,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.015672320237336206,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936698,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403623,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.0074548356504067215,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427417,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571408,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653878,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178336,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651552,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575022,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436969,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259588,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406729,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724444,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.01570113134540077,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499668,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412787,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612037,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904628,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333342,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400248,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574883,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.015524980677122581,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308498,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.01573679276875202,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524301,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621238,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881423,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607817,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099179,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122583,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817143,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.01035486471293671,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256575,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342765,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108652,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118587,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357803,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696851,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.01562562511262066,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746828,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140929,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139981,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656799,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.015222868840522019,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.304,
						"acc_stderr,none": 0.01455320568795045,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7162079510703364,
						"acc_stderr,none": 0.007885191054174687,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.7007651189602767,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.28454680534918286,
						"acc_norm,none": 0.28454680534918286,
						"acc_norm_stderr,none": 0.12546982090493775,
						"acc_stderr,none": 0.12546982090493775,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.060606060606060594,
						"acc_stderr,none": 0.060606060606060594,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4772727272727273,
						"acc_norm,none": 0.4772727272727273,
						"acc_norm_stderr,none": 0.07617047451458002,
						"acc_stderr,none": 0.07617047451458002,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.30832326023139356,
						"acc_norm,none": 0.30832326023139356,
						"acc_norm_stderr,none": 0.056982837727874165,
						"acc_stderr,none": 0.056982837727874165,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456226,
						"acc_stderr,none": 0.03579526516456226,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.03691164789738652,
						"acc_stderr,none": 0.03691164789738652,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3875,
						"acc_norm,none": 0.3875,
						"acc_norm_stderr,none": 0.03863583812241406,
						"acc_stderr,none": 0.03863583812241406,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3349282296650718,
						"acc_norm,none": 0.3349282296650718,
						"acc_norm_stderr,none": 0.03272491043051241,
						"acc_stderr,none": 0.03272491043051241,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.32061068702290074,
						"acc_norm,none": 0.32061068702290074,
						"acc_norm_stderr,none": 0.04093329229834278,
						"acc_stderr,none": 0.04093329229834278,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3622291021671827,
						"acc_norm,none": 0.3622291021671827,
						"acc_norm_stderr,none": 0.026785273354320082,
						"acc_stderr,none": 0.026785273354320082,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29901960784313725,
						"acc_norm,none": 0.29901960784313725,
						"acc_norm_stderr,none": 0.03213325717373618,
						"acc_stderr,none": 0.03213325717373618,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3854748603351955,
						"acc_norm,none": 0.3854748603351955,
						"acc_norm_stderr,none": 0.03648025419294365,
						"acc_stderr,none": 0.03648025419294365,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189596,
						"acc_stderr,none": 0.04794743635189596,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.02717645531875414,
						"acc_stderr,none": 0.02717645531875414,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.032834720561085676,
						"acc_stderr,none": 0.032834720561085676,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.03446296217088426,
						"acc_stderr,none": 0.03446296217088426,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2925170068027211,
						"acc_norm,none": 0.2925170068027211,
						"acc_norm_stderr,none": 0.03764931984085173,
						"acc_stderr,none": 0.03764931984085173,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.37423312883435583,
						"acc_norm,none": 0.37423312883435583,
						"acc_norm_stderr,none": 0.03802068102899615,
						"acc_stderr,none": 0.03802068102899615,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250621,
						"acc_stderr,none": 0.03451628876250621,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.2828282828282828,
						"acc_norm_stderr,none": 0.032087795587867514,
						"acc_stderr,none": 0.032087795587867514,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.032145368597886394,
						"acc_stderr,none": 0.032145368597886394,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.027641785707241334,
						"acc_stderr,none": 0.027641785707241334,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03999262876617723,
						"acc_stderr,none": 0.03999262876617723,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.035208939510976534,
						"acc_stderr,none": 0.035208939510976534,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3305084745762712,
						"acc_norm,none": 0.3305084745762712,
						"acc_norm_stderr,none": 0.043488147791922734,
						"acc_stderr,none": 0.043488147791922734,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.34965034965034963,
						"acc_norm,none": 0.34965034965034963,
						"acc_norm_stderr,none": 0.04001716028382393,
						"acc_stderr,none": 0.04001716028382393,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37790697674418605,
						"acc_norm,none": 0.37790697674418605,
						"acc_norm_stderr,none": 0.03707849218723281,
						"acc_stderr,none": 0.03707849218723281,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2773722627737226,
						"acc_norm,none": 0.2773722627737226,
						"acc_norm_stderr,none": 0.022110415304121923,
						"acc_stderr,none": 0.022110415304121923,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4392523364485981,
						"acc_norm,none": 0.4392523364485981,
						"acc_norm_stderr,none": 0.03400564171454576,
						"acc_stderr,none": 0.03400564171454576,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.04183273258787625,
						"acc_stderr,none": 0.04183273258787625,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837384,
						"acc_stderr,none": 0.04239540943837384,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.03211151353994381,
						"acc_stderr,none": 0.03211151353994381,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392582,
						"acc_stderr,none": 0.03443002441392582,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180183,
						"acc_stderr,none": 0.04244626443180183,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.037528339580033376,
						"acc_stderr,none": 0.037528339580033376,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290195,
						"acc_stderr,none": 0.034449526562290195,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.03064119407629314,
						"acc_stderr,none": 0.03064119407629314,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26063829787234044,
						"acc_norm,none": 0.26063829787234044,
						"acc_norm_stderr,none": 0.022668978836259783,
						"acc_stderr,none": 0.022668978836259783,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3577586206896552,
						"acc_norm,none": 0.3577586206896552,
						"acc_norm_stderr,none": 0.0315382945960225,
						"acc_stderr,none": 0.0315382945960225,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.33185840707964603,
						"acc_norm,none": 0.33185840707964603,
						"acc_norm_stderr,none": 0.031392030462821234,
						"acc_stderr,none": 0.031392030462821234,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3515151515151515,
						"acc_norm,none": 0.3515151515151515,
						"acc_norm_stderr,none": 0.037282069986826503,
						"acc_stderr,none": 0.037282069986826503,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3254437869822485,
						"acc_norm,none": 0.3254437869822485,
						"acc_norm_stderr,none": 0.03614867847292203,
						"acc_stderr,none": 0.03614867847292203,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.35403726708074534,
						"acc_norm,none": 0.35403726708074534,
						"acc_norm_stderr,none": 0.03780665290318812,
						"acc_stderr,none": 0.03780665290318812,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.03714454174077367,
						"acc_stderr,none": 0.03714454174077367,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2852371922144055,
						"mcc_stderr,none": 0.030897532007963475
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.036845294917747094,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.461594364937388,
						"likelihood_diff_stderr,none": 0.5218435025554806,
						"pct_stereotype,none": 0.618515205724508,
						"pct_stereotype_stderr,none": 0.06779316747220007
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.722793679189028,
						"likelihood_diff_stderr,none": 0.08740534942830487,
						"pct_stereotype,none": 0.6463923673225999,
						"pct_stereotype_stderr,none": 0.011678092691867982
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.1098901098901095,
						"likelihood_diff_stderr,none": 0.3891060555354371,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.079545454545454,
						"likelihood_diff_stderr,none": 1.8323383538460378,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.973076923076923,
						"likelihood_diff_stderr,none": 0.6055910146881357,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.686328125,
						"likelihood_diff_stderr,none": 0.16518996173361047,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.027105679632478466
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.6180555555555554,
						"likelihood_diff_stderr,none": 0.24391091646506874,
						"pct_stereotype,none": 0.5833333333333334,
						"pct_stereotype_stderr,none": 0.03362277436608044
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8975694444444446,
						"likelihood_diff_stderr,none": 0.3164299676444534,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.559301181102362,
						"likelihood_diff_stderr,none": 0.15259550814915282,
						"pct_stereotype,none": 0.562992125984252,
						"pct_stereotype_stderr,none": 0.022028849296085083
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6554054054054053,
						"likelihood_diff_stderr,none": 0.34873270117365723,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.079301075268817,
						"likelihood_diff_stderr,none": 0.4431581825914874,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.032297000033640014
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.2631578947368425,
						"likelihood_diff_stderr,none": 0.245276931063261,
						"pct_stereotype,none": 0.6842105263157895,
						"pct_stereotype_stderr,none": 0.03381137233892748
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.200022361359571,
						"likelihood_diff_stderr,none": 0.0744035509359531,
						"pct_stereotype,none": 0.5909361955873583,
						"pct_stereotype_stderr,none": 0.01200960753851581
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.345833333333333,
						"likelihood_diff_stderr,none": 0.3153509157469215,
						"pct_stereotype,none": 0.5666666666666667,
						"pct_stereotype_stderr,none": 0.05252667118728807
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8461538461538463,
						"likelihood_diff_stderr,none": 0.8245717913276286,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.84469696969697,
						"likelihood_diff_stderr,none": 0.4360224016238241,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.7355919003115265,
						"likelihood_diff_stderr,none": 0.12973223035192422,
						"pct_stereotype,none": 0.5950155763239875,
						"pct_stereotype_stderr,none": 0.0274415310396384
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.2509881422924902,
						"likelihood_diff_stderr,none": 0.19442062573894742,
						"pct_stereotype,none": 0.43478260869565216,
						"pct_stereotype_stderr,none": 0.031227956788816423
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4618055555555554,
						"likelihood_diff_stderr,none": 0.43919885123128805,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.055335047518872166
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.885054347826087,
						"likelihood_diff_stderr,none": 0.14287717075282722,
						"pct_stereotype,none": 0.508695652173913,
						"pct_stereotype_stderr,none": 0.023334471757161752
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.356521739130435,
						"likelihood_diff_stderr,none": 0.27659910334947585,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.491758241758242,
						"likelihood_diff_stderr,none": 0.31386485948914233,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.0410844685503588
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.699298469387755,
						"likelihood_diff_stderr,none": 0.24382751942817196,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.03252156607969807
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.16338582677165353,
						"exact_match_stderr,none": 0.008203795712801939
					},
					"glue": {
						"acc,none": 0.7373864602217259,
						"acc_stderr,none": 0.048159579738364676,
						"alias": "glue",
						"f1,none": 0.7165164305105304,
						"f1_stderr,none": 0.00011733386549926985,
						"mcc,none": 0.2852371922144055,
						"mcc_stderr,none": 0.0009546574841831274
					},
					"hellaswag": {
						"acc,none": 0.5400318661621191,
						"acc_norm,none": 0.7261501692889862,
						"acc_norm_stderr,none": 0.004450214826707172,
						"acc_stderr,none": 0.004973762948302803,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.25275772451631534,
						"acc_norm,none": 0.25275772451631534,
						"acc_norm_stderr,none": 0.02291604840591558,
						"acc_stderr,none": 0.02291604840591558,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281558,
						"acc_stderr,none": 0.013354937452281558,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877651,
						"acc_stderr,none": 0.013663187134877651,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702325,
						"acc_stderr,none": 0.013681600278702325,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168544,
						"acc_stderr,none": 0.013772206565168544,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.018032386001530076,
						"acc_stderr,none": 0.018032386001530076,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577943,
						"acc_stderr,none": 0.013454070462577943,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509001,
						"acc_stderr,none": 0.014658474370509001,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.032785276754349606,
						"acc_stderr,none": 0.032785276754349606,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259734,
						"acc_stderr,none": 0.013929286594259734,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.24615384615384617,
						"acc_norm,none": 0.24615384615384617,
						"acc_norm_stderr,none": 0.037927115964796136,
						"acc_stderr,none": 0.037927115964796136,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.045604802157206824,
						"acc_stderr,none": 0.045604802157206824,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937587,
						"acc_stderr,none": 0.013493000446937587,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234183,
						"acc_stderr,none": 0.013807775152234183,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.013232501619085336,
						"acc_stderr,none": 0.013232501619085336,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804478,
						"acc_stderr,none": 0.013273740700804478,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.01362606581775063,
						"acc_stderr,none": 0.01362606581775063,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.01371813351688891,
						"acc_stderr,none": 0.01371813351688891,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102135,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.01373625439065114,
						"acc_stderr,none": 0.01373625439065114,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.01399667485179627,
						"acc_stderr,none": 0.01399667485179627,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234195,
						"acc_stderr,none": 0.013807775152234195,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881435,
						"acc_stderr,none": 0.013588548437881435,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247121,
						"acc_stderr,none": 0.013414729030247121,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881418,
						"acc_stderr,none": 0.013588548437881418,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.017731561494907167,
						"acc_stderr,none": 0.017731561494907167,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259736,
						"acc_stderr,none": 0.013929286594259736,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895024,
						"acc_stderr,none": 0.013825416526895024,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.223,
						"acc_norm,none": 0.223,
						"acc_norm_stderr,none": 0.013169830843425682,
						"acc_stderr,none": 0.013169830843425682,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.025574048533225632,
						"acc_stderr,none": 0.025574048533225632,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234199,
						"acc_stderr,none": 0.013807775152234199,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.03182868716477583,
						"acc_stderr,none": 0.03182868716477583,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.01327374070080448,
						"acc_stderr,none": 0.01327374070080448,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872835,
						"acc_stderr,none": 0.013790038620872835,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.02960162633044062,
						"acc_stderr,none": 0.02960162633044062,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.01455320568795044,
						"acc_stderr,none": 0.01455320568795044,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5255426441569832,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.00049329859719439,
						"acc_stderr,none": 0.041502050958988776,
						"alias": "kobest",
						"f1,none": 0.4299505805385205,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5235042735042735,
						"acc_stderr,none": 0.013334010264781468,
						"alias": " - kobest_boolq",
						"f1,none": 0.388864200502949,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": " - kobest_copa",
						"f1,none": 0.6178439780335515,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.444,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.02221032636397741,
						"acc_stderr,none": 0.022242244375731024,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.43915991498137585,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5188916876574308,
						"acc_stderr,none": 0.025108004284191587,
						"alias": " - kobest_sentineg",
						"f1,none": 0.41394288364184406,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.717931302154085,
						"acc_stderr,none": 0.01759584188685439,
						"alias": "lambada",
						"perplexity,none": 3.58186934985286,
						"perplexity_stderr,none": 0.1937821012020122
					},
					"lambada_cloze": {
						"acc,none": 0.09198525130991654,
						"acc_stderr,none": 0.02181135934210522,
						"alias": "lambada_cloze",
						"perplexity,none": 243.3650121744029,
						"perplexity_stderr,none": 10.131344141982527
					},
					"lambada_multilingual": {
						"acc,none": 0.5397632447118182,
						"acc_stderr,none": 0.08583436346519245,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.7460725050401,
						"perplexity_stderr,none": 8.187793871443906
					},
					"lambada_openai": {
						"acc,none": 0.7504366388511546,
						"acc_stderr,none": 0.006029197365300717,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2237325105024204,
						"perplexity_stderr,none": 0.06224185288795293
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04909761304094702,
						"acc_stderr,none": 0.0030103031355499157,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 229.67850898836204,
						"perplexity_stderr,none": 6.871243839739928
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42907044440131964,
						"acc_stderr,none": 0.006895529739245136,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.47887545209441,
						"perplexity_stderr,none": 1.916750240350421
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7504366388511546,
						"acc_stderr,none": 0.006029197365300717,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.223968363632318,
						"perplexity_stderr,none": 0.06225872960534053
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4581797011449641,
						"acc_stderr,none": 0.0069415687750082455,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.082275833367955,
						"perplexity_stderr,none": 1.3808206979431574
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5470599650688919,
						"acc_stderr,none": 0.006935054751870186,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.3887917181158,
						"perplexity_stderr,none": 0.7935178447896293
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5140694740927615,
						"acc_stderr,none": 0.006963219279097558,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.55645115799003,
						"perplexity_stderr,none": 1.1414343376762928
					},
					"lambada_standard": {
						"acc,none": 0.6850378420337667,
						"acc_stderr,none": 0.006471404446305818,
						"alias": " - lambada_standard",
						"perplexity,none": 3.9420292419829193,
						"perplexity_stderr,none": 0.07980436457745645
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.13487288957888607,
						"acc_stderr,none": 0.004758985315548352,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 257.0515153604438,
						"perplexity_stderr,none": 8.02594763187795
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3377862595419847,
						"exact_match_stderr,get-answer": 0.011932515057580263
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.2780337941628264,
						"acc_norm_stderr,none": 0.017573187770282713,
						"acc_stderr,none": 0.01674276693510144,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25699745547073793,
						"acc_norm,none": 0.2881679389312977,
						"acc_norm_stderr,none": 0.011426770634965253,
						"acc_stderr,none": 0.011024819039416617,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2549413735343384,
						"acc_norm,none": 0.26700167504187605,
						"acc_norm_stderr,none": 0.008098583692885271,
						"acc_stderr,none": 0.007978403103631439,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.484537174327473,
						"acc_stderr,none": 0.00514343466352321,
						"alias": "mc_taco",
						"f1,none": 0.5534452702082759,
						"f1_stderr,none": 0.005739533495560975
					},
					"medmcqa": {
						"acc,none": 0.3389911546736792,
						"acc_norm,none": 0.3389911546736792,
						"acc_norm_stderr,none": 0.0073199110632478775,
						"acc_stderr,none": 0.0073199110632478775,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.35899450117831894,
						"acc_norm,none": 0.35899450117831894,
						"acc_norm_stderr,none": 0.013450276741385178,
						"acc_stderr,none": 0.013450276741385178,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3994445235721407,
						"acc_stderr,none": 0.08664638523251636,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4276315789473684,
						"acc_stderr,none": 0.04026097083296559,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.030151134457776285,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04048439222695598,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.34104046242774566,
						"acc_stderr,none": 0.03614665424180826,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3446808510638298,
						"acc_stderr,none": 0.031068985963122155,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518751,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.41379310344827586,
						"acc_stderr,none": 0.04104269211806231,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.023809523809523857,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.042407993275749234,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4483870967741935,
						"acc_stderr,none": 0.028292056830112728,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.03178529710642749,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5575757575757576,
						"acc_stderr,none": 0.038783721137112745,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4696969696969697,
						"acc_stderr,none": 0.03555804051763928,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5544041450777202,
						"acc_stderr,none": 0.03587014986075661,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.35128205128205126,
						"acc_stderr,none": 0.024203665177902803,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.028226446749683515,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.33613445378151263,
						"acc_stderr,none": 0.03068473711513537,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.0347918557259966,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5137614678899083,
						"acc_stderr,none": 0.02142920208987408,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.27314814814814814,
						"acc_stderr,none": 0.030388051301678116,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5049019607843137,
						"acc_stderr,none": 0.035091433756067845,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5991561181434599,
						"acc_stderr,none": 0.031900803894732356,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.38565022421524664,
						"acc_stderr,none": 0.03266842214289201,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5114503816793893,
						"acc_stderr,none": 0.043841400240780176,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3851222104144527,
						"acc_stderr,none": 0.09155979451522625,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.045454545454545456,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.04803752235190192,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915185,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5242718446601942,
						"acc_stderr,none": 0.049449010929737795,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6367521367521367,
						"acc_stderr,none": 0.03150712523091265,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.565772669220945,
						"acc_stderr,none": 0.017724589389677785,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.42485549132947975,
						"acc_stderr,none": 0.02661335084026174,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22569832402234638,
						"acc_stderr,none": 0.01398139505845505,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.02818059632825929,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4431927904731252,
						"acc_stderr,none": 0.08502739498941352,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4983922829581994,
						"acc_stderr,none": 0.02839794490780661,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4382716049382716,
						"acc_stderr,none": 0.027607914087400473,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.30851063829787234,
						"acc_stderr,none": 0.02755336616510137,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3363754889178618,
						"acc_stderr,none": 0.01206708307945222,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.34191176470588236,
						"acc_stderr,none": 0.02881472242225418,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.42320261437908496,
						"acc_stderr,none": 0.019987809769482064,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.363265306122449,
						"acc_stderr,none": 0.030789051139030806,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43646408839779005,
						"acc_stderr,none": 0.07665101748216732,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5323383084577115,
						"acc_stderr,none": 0.035281314729336065,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34157944814462415,
						"acc_stderr,none": 0.06629790429175356,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956914,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6198830409356725,
						"acc_stderr,none": 0.037229657413855394,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7840040753948039,
						"acc_stderr,none": 0.004153927494483167,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7902766476810414,
						"acc_stderr,none": 0.004105954400764088,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150203,
						"alias": "mrpc",
						"f1,none": 0.8315301391035549,
						"f1_stderr,none": 0.015895448354130492
					},
					"multimedqa": {
						"acc,none": 0.37388218594748046,
						"acc_norm,none": 0.34701639564631104,
						"acc_norm_stderr,none": 0.00011838282028775971,
						"acc_stderr,none": 0.08381629994028411,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5187706270627063,
						"acc_stderr,none": 0.007176740499730091,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7053235531458736,
						"mrr_stderr,none": 0.010369212045414817,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4108352144469526,
						"r@2_stderr,none": 0.016537908550616855
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6570729888117879,
						"mrr_stderr,none": 0.01047962167158151,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4604966139954853,
						"r@2_stderr,none": 0.016754777798868806
					},
					"openbookqa": {
						"acc,none": 0.296,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020435342091896146,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.011115272135099214,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3825,
						"acc_stderr,none": 0.010869956438573788,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.417,
						"acc_stderr,none": 0.011027978425535505,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5515,
						"acc_stderr,none": 0.011123656901911276,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886096,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.011169148353274965,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4878571428571429,
						"acc_stderr,none": 0.05503982466052629,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7725788900979326,
						"acc_norm,none": 0.7845484221980413,
						"acc_norm_stderr,none": 0.009592463115658107,
						"acc_stderr,none": 0.00977985076784725,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24407557643040137,
						"acc_norm,none": 0.2851195559350982,
						"acc_norm_stderr,none": 0.0032984003106770288,
						"acc_stderr,none": 0.0031381558044888793,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.020740596536488073,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7379891040256733,
						"acc_norm,none": 0.6461421112326158,
						"acc_norm_stderr,none": 0.010730998874619925,
						"acc_stderr,none": 0.15148417722863466,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339385751844542,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517956372128914,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2237325105024204,
						"perplexity_stderr,none": 0.06224185288795293,
						"word_perplexity,none": 10.482821986587334,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.40602836879432624,
						"acc_norm,none": 0.45390070921985815,
						"acc_norm_stderr,none": 0.06494692189816992,
						"acc_stderr,none": 0.053005012134036765,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.04583492485141056,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978144,
						"acc_stderr,none": 0.037826149818120415,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.39436619718309857,
						"acc_norm_stderr,none": 0.02905103950765015,
						"acc_stderr,none": 0.02909549291706491,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7496908236458075,
						"acc_stderr,none": 0.0021544329543181626,
						"alias": "qqp",
						"f1,none": 0.7155545561864073,
						"f1_stderr,none": 0.0027077948126596084
					},
					"race": {
						"acc,none": 0.3397129186602871,
						"acc_stderr,none": 0.014657914432586402,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2635,
						"em_stderr,none": 0.004405532416359069,
						"f1,none": 0.2730138097643852,
						"f1_stderr,none": 0.004417936600107803
					},
					"rte": {
						"acc,none": 0.6823104693140795,
						"acc_stderr,none": 0.028024503562454613,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.958,
						"acc_norm,none": 0.959,
						"acc_norm_stderr,none": 0.006273624021118798,
						"acc_stderr,none": 0.006346359293033833,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8956422018348624,
						"acc_stderr,none": 0.010359067206812064,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5792262321303608,
						"acc_norm,none": 0.7709687093871839,
						"acc_norm_stderr,none": 0.0029709579732523027,
						"acc_stderr,none": 0.0034904318398323755,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6967821370337094,
						"acc_stderr,none": 0.08088910421587378,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6139823717948718,
						"acc_stderr,none": 0.004872490314733759,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9111178676396068,
						"acc_stderr,none": 0.002864995596442333,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5704901960784313,
						"acc_stderr,none": 0.004901531946121194,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3252388130954996,
						"acc_stderr,none": 0.0015949854656307676,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -6.618048378622622,
						"bleu_diff_stderr,none": 0.8727421935974016,
						"bleu_max,none": 26.642436123429142,
						"bleu_max_stderr,none": 0.8112225080728003,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.712637578236393,
						"rouge1_diff_stderr,none": 0.9613030118003614,
						"rouge1_max,none": 51.56613129898997,
						"rouge1_max_stderr,none": 0.8854552898464716,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237012,
						"rouge2_diff,none": -10.299603055500173,
						"rouge2_diff_stderr,none": 1.14203323777167,
						"rouge2_max,none": 35.408678090010596,
						"rouge2_max_stderr,none": 1.037251729790366,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -9.107939695628595,
						"rougeL_diff_stderr,none": 0.9735156072604116,
						"rougeL_max,none": 48.746102374106314,
						"rougeL_max_stderr,none": 0.9030391009263778
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -6.618048378622622,
						"bleu_diff_stderr,none": 0.8727421935974016,
						"bleu_max,none": 26.642436123429142,
						"bleu_max_stderr,none": 0.8112225080728003,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.712637578236393,
						"rouge1_diff_stderr,none": 0.9613030118003614,
						"rouge1_max,none": 51.56613129898997,
						"rouge1_max_stderr,none": 0.8854552898464716,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237012,
						"rouge2_diff,none": -10.299603055500173,
						"rouge2_diff_stderr,none": 1.14203323777167,
						"rouge2_max,none": 35.408678090010596,
						"rouge2_max_stderr,none": 1.037251729790366,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -9.107939695628595,
						"rougeL_diff_stderr,none": 0.9735156072604116,
						"rougeL_max,none": 48.746102374106314,
						"rougeL_max_stderr,none": 0.9030391009263778
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25091799265605874,
						"acc_stderr,none": 0.015176985027707694,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3995596335349405,
						"acc_stderr,none": 0.01401456282411012,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.16289370078740156,
						"exact_match_stderr,none": 0.00819384017148717
					},
					"wic": {
						"acc,none": 0.5799373040752351,
						"acc_stderr,none": 0.019555902537234413,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6339385751844542,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517956372128914,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.482821986587334,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7205998421468035,
						"acc_stderr,none": 0.012610826539404678,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.41346153846153844,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.021217447349500148,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6236363636363635,
						"acc_stderr,none": 0.0719421819722929,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.021912377885779967,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290788,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.01963596552972551,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.02210903931061855,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209177,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43593038821954483,
						"acc_stderr,none": 0.04792776487499466,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337345,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4791164658634538,
						"acc_stderr,none": 0.010013327358568523,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.48032128514056227,
						"acc_stderr,none": 0.010014307727112703,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38433734939759034,
						"acc_stderr,none": 0.00975023876572252,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5273092369477912,
						"acc_stderr,none": 0.01000711288973199,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5020080321285141,
						"acc_stderr,none": 0.010021992045038411,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4931726907630522,
						"acc_stderr,none": 0.010021138522919167,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432363,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624489,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39759036144578314,
						"acc_stderr,none": 0.009809602996075818,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.00988121593211599,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894261,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099368,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453012,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301828,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6304674808976596,
						"acc_stderr,none": 0.062016620175044196,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5962938451356717,
						"acc_stderr,none": 0.012626249735246581,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7835870284579749,
						"acc_stderr,none": 0.010597338079182233,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71475843812045,
						"acc_stderr,none": 0.011619771152072337,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.01274344303469841,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551167,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6671078755790867,
						"acc_stderr,none": 0.012127221798743735,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481957,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6803441429516877,
						"acc_stderr,none": 0.01200099306329728,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.012799246690109744,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570942,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399377,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8186109238031019,
						"acc_stderr,none": 0.035284653916876206,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8761290322580645,
						"acc_stderr,none": 0.006833618649268943,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7591240875912408,
						"acc_stderr,none": 0.013815618043006161,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.02577120320708472,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.02597659935230537,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7837301587301587,
						"acc_stderr,none": 0.01835681232408577,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-rwkv-60_pth"
	},
	"./rwkv-x-dev/1_3-C0-rwkv-70_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6389515219842165,
						"acc_norm,none": 0.6414881623449831,
						"acc_norm_stderr,none": 0.09132133974564657,
						"acc_stderr,none": 0.11006027206593597,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4775,
						"acc_stderr,none": 0.04559464480446209,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0572,
						"acc_stderr,none": 0.030237317305595715,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8154477611940298,
						"acc_stderr,none": 0.16754264242317915,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2845468053491827,
						"acc_norm,none": 0.2845468053491827,
						"acc_norm_stderr,none": 0.1264973187844202,
						"acc_stderr,none": 0.1264973187844202,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3150578483854258,
						"acc_norm,none": 0.3150578483854258,
						"acc_norm_stderr,none": 0.060825901820796,
						"acc_stderr,none": 0.060825901820796,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4583146988670244,
						"likelihood_diff_stderr,none": 0.5203507335620096,
						"pct_stereotype,none": 0.6138938580799046,
						"pct_stereotype_stderr,none": 0.07118790395039078
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15354330708661418,
						"exact_match_stderr,none": 0.007999500228589217
					},
					"glue": {
						"acc,none": 0.7463673177703668,
						"acc_stderr,none": 0.004546647903474537,
						"alias": "glue",
						"f1,none": 0.719659290906775,
						"f1_stderr,none": 0.00011410345825277068,
						"mcc,none": 0.2538184101420976,
						"mcc_stderr,none": 0.031547917341415904
					},
					"kmmlu": {
						"acc,none": 0.2601501588218308,
						"acc_norm,none": 0.2601501588218308,
						"acc_norm_stderr,none": 0.022136135990927036,
						"acc_stderr,none": 0.022136135990927036,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5259811444858583,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.0004942605210420811,
						"acc_stderr,none": 0.04159120557341156,
						"alias": "kobest",
						"f1,none": 0.43338500370588745,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7173491170192121,
						"acc_stderr,none": 0.018142177898487488,
						"alias": "lambada",
						"perplexity,none": 3.584861786870358,
						"perplexity_stderr,none": 0.19127485129939906
					},
					"lambada_cloze": {
						"acc,none": 0.08732777023093344,
						"acc_stderr,none": 0.020745780851552195,
						"alias": "lambada_cloze",
						"perplexity,none": 254.72568437865908,
						"perplexity_stderr,none": 9.826644834692615
					},
					"lambada_multilingual": {
						"acc,none": 0.5392198719192703,
						"acc_stderr,none": 0.08651953559796134,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.73213239504858,
						"perplexity_stderr,none": 8.160976080984481
					},
					"mmlu": {
						"acc,none": 0.3999430280586811,
						"acc_stderr,none": 0.08635950811058715,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.38257173219978746,
						"acc_stderr,none": 0.0918370668537284,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.44448020598648214,
						"acc_stderr,none": 0.08525395601423287,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43841403964900877,
						"acc_stderr,none": 0.07527032506953982,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34443387250237867,
						"acc_stderr,none": 0.06565641358073873,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3762952448545067,
						"acc_norm,none": 0.3487615198903956,
						"acc_norm_stderr,none": 0.00012208400286875805,
						"acc_stderr,none": 0.08315819899393075,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48678571428571427,
						"acc_stderr,none": 0.05510670038912275,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7383168260083635,
						"acc_norm,none": 0.6453518655532496,
						"acc_norm_stderr,none": 0.01042876634587923,
						"acc_stderr,none": 0.15753668898943451,
						"alias": "pythia",
						"bits_per_byte,none": 0.6338882691646343,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517415278558557,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2295992530296496,
						"perplexity_stderr,none": 0.06254342655595974,
						"word_perplexity,none": 10.480867512464803,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.40425531914893614,
						"acc_norm,none": 0.450354609929078,
						"acc_norm_stderr,none": 0.06767988691348358,
						"acc_stderr,none": 0.04923318221742833,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6968819673222189,
						"acc_stderr,none": 0.08096309710060864,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32727737724820793,
						"acc_stderr,none": 0.0015362538719523623,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241473,
						"bleu_diff,none": -6.512966303026584,
						"bleu_diff_stderr,none": 0.8728353518103906,
						"bleu_max,none": 26.937982387212646,
						"bleu_max_stderr,none": 0.8169320733721972,
						"rouge1_acc,none": 0.2962056303549572,
						"rouge1_acc_stderr,none": 0.015983595101811392,
						"rouge1_diff,none": -8.734657558997394,
						"rouge1_diff_stderr,none": 0.952866418147229,
						"rouge1_max,none": 52.000317912608054,
						"rouge1_max_stderr,none": 0.888081115007632,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.01559475363200653,
						"rouge2_diff,none": -10.380477320788014,
						"rouge2_diff_stderr,none": 1.1431126685043758,
						"rouge2_max,none": 35.827617650407035,
						"rouge2_max_stderr,none": 1.0377065231920681,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768542,
						"rougeL_diff,none": -9.10458901595442,
						"rougeL_diff_stderr,none": 0.9667154591379234,
						"rougeL_max,none": 49.15763342945929,
						"rougeL_max_stderr,none": 0.9012952883675601
					},
					"xcopa": {
						"acc,none": 0.6238181818181818,
						"acc_stderr,none": 0.07040862339150304,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43585006693440426,
						"acc_stderr,none": 0.0509123654087118,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6311894591179833,
						"acc_stderr,none": 0.0625903705087539,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8195100022476961,
						"acc_stderr,none": 0.045857811074837516,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6389515219842165,
						"acc_norm,none": 0.6414881623449831,
						"acc_norm_stderr,none": 0.09132133974564657,
						"acc_stderr,none": 0.11006027206593597,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4775,
						"acc_stderr,none": 0.04559464480446209,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.015663503610155283,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.0157161699532041,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.42916666666666664,
						"acc_stderr,none": 0.014294141030409657,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4061433447098976,
						"acc_norm,none": 0.44880546075085326,
						"acc_norm_stderr,none": 0.014534599585097672,
						"acc_stderr,none": 0.014351656690097869,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7537878787878788,
						"acc_norm,none": 0.7365319865319865,
						"acc_norm_stderr,none": 0.009039157374497713,
						"acc_stderr,none": 0.008839902656771878,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0572,
						"acc_stderr,none": 0.030237317305595715,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.063,
						"acc_stderr,none": 0.005434175662652582,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.117,
						"acc_stderr,none": 0.0071889735477559495,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.071,
						"acc_stderr,none": 0.005744214306500109,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0215,
						"acc_stderr,none": 0.003244092641792836,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.021,
						"acc_stderr,none": 0.003206967776757444,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.075,
						"acc_stderr,none": 0.005891082449449559,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.007,
						"acc_stderr,none": 0.0018647355360237455,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1185,
						"acc_stderr,none": 0.007228762169365424,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.004,
						"acc_stderr,none": 0.0014117352790976717,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.08,
						"acc_stderr,none": 0.006067817499282815,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006073752711496746,
						"acc_stderr,none": 0.0016186926522842625,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8154477611940298,
						"acc_stderr,none": 0.16754264242317915,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525044,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319409,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298133,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122342,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523745,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099186,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.015658997547870243,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.01005510343582333,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849879,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574502989,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706846,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.0069604200625714,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653916,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270417,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259209,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.01429714686251791,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.01465847437050901,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612035,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246439,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.01001655286669687,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689078,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103298,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177916,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920673,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434935,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.00872852720607479,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696843,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240653,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996693,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.01486539538592836,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.00877616208949112,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784371,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.015733516566347826,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425673,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.015778243024904586,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852431,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103305,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.01351231225892084,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011859,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767693,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264368,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.595,
						"acc_stderr,none": 0.015531136990453042,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666659,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343972,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256563,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436967,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.449,
						"acc_stderr,none": 0.01573679276875203,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.01031821038094609,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074801,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.01550610974549832,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397332,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747394,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.01564032031704011,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665544,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140915,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523712,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656803,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798597,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.307,
						"acc_stderr,none": 0.014593284892852621,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7229357798165138,
						"acc_stderr,none": 0.007827672048734533,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.7007651189602767,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2845468053491827,
						"acc_norm,none": 0.2845468053491827,
						"acc_norm_stderr,none": 0.1264973187844202,
						"acc_stderr,none": 0.1264973187844202,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.07624928516630235,
						"acc_stderr,none": 0.07624928516630235,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3150578483854258,
						"acc_norm,none": 0.3150578483854258,
						"acc_norm_stderr,none": 0.060825901820796,
						"acc_stderr,none": 0.060825901820796,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.037184093212853736,
						"acc_stderr,none": 0.037184093212853736,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.43125,
						"acc_norm,none": 0.43125,
						"acc_norm_stderr,none": 0.039275949840189193,
						"acc_stderr,none": 0.039275949840189193,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3253588516746411,
						"acc_norm,none": 0.3253588516746411,
						"acc_norm_stderr,none": 0.03248523846063362,
						"acc_stderr,none": 0.03248523846063362,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3282442748091603,
						"acc_norm,none": 0.3282442748091603,
						"acc_norm_stderr,none": 0.04118438565806298,
						"acc_stderr,none": 0.04118438565806298,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.0402637721078731,
						"acc_stderr,none": 0.0402637721078731,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.04619693596622581,
						"acc_stderr,none": 0.04619693596622581,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3591331269349845,
						"acc_norm,none": 0.3591331269349845,
						"acc_norm_stderr,none": 0.02673521673543995,
						"acc_stderr,none": 0.02673521673543995,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.03228210387037892,
						"acc_stderr,none": 0.03228210387037892,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.40782122905027934,
						"acc_norm,none": 0.40782122905027934,
						"acc_norm_stderr,none": 0.03683420752157822,
						"acc_stderr,none": 0.03683420752157822,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036423,
						"acc_stderr,none": 0.027985699387036423,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189596,
						"acc_stderr,none": 0.04794743635189596,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852895,
						"acc_stderr,none": 0.04730439022852895,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809805,
						"acc_stderr,none": 0.039578354719809805,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714285,
						"acc_stderr,none": 0.04285714285714285,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439375,
						"acc_stderr,none": 0.04396093377439375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.033086111132364364,
						"acc_stderr,none": 0.033086111132364364,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.03401052620104089,
						"acc_stderr,none": 0.03401052620104089,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3129251700680272,
						"acc_norm,none": 0.3129251700680272,
						"acc_norm_stderr,none": 0.03837477482026868,
						"acc_stderr,none": 0.03837477482026868,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3710691823899371,
						"acc_norm,none": 0.3710691823899371,
						"acc_norm_stderr,none": 0.03843265063227864,
						"acc_stderr,none": 0.03843265063227864,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3619631901840491,
						"acc_norm,none": 0.3619631901840491,
						"acc_norm_stderr,none": 0.037757007291414416,
						"acc_stderr,none": 0.037757007291414416,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790505,
						"acc_stderr,none": 0.028394293050790505,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29797979797979796,
						"acc_norm,none": 0.29797979797979796,
						"acc_norm_stderr,none": 0.032586303838365555,
						"acc_stderr,none": 0.032586303838365555,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4411764705882353,
						"acc_norm,none": 0.4411764705882353,
						"acc_norm_stderr,none": 0.032252942323996406,
						"acc_stderr,none": 0.032252942323996406,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.039992628766177214,
						"acc_stderr,none": 0.039992628766177214,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3352272727272727,
						"acc_norm,none": 0.3352272727272727,
						"acc_norm_stderr,none": 0.03568512682153708,
						"acc_stderr,none": 0.03568512682153708,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.042227768322336254,
						"acc_stderr,none": 0.042227768322336254,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.34965034965034963,
						"acc_norm,none": 0.34965034965034963,
						"acc_norm_stderr,none": 0.04001716028382393,
						"acc_stderr,none": 0.04001716028382393,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03932537680392871,
						"acc_stderr,none": 0.03932537680392871,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3953488372093023,
						"acc_norm,none": 0.3953488372093023,
						"acc_norm_stderr,none": 0.03738906664833521,
						"acc_stderr,none": 0.03738906664833521,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2822384428223844,
						"acc_norm,none": 0.2822384428223844,
						"acc_norm_stderr,none": 0.022228300145424457,
						"acc_stderr,none": 0.022228300145424457,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4672897196261682,
						"acc_norm,none": 0.4672897196261682,
						"acc_norm_stderr,none": 0.034186044262789386,
						"acc_stderr,none": 0.034186044262789386,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.041832732587876245,
						"acc_stderr,none": 0.041832732587876245,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630126,
						"acc_stderr,none": 0.03260773253630126,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.034380708208626445,
						"acc_stderr,none": 0.034380708208626445,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180182,
						"acc_stderr,none": 0.04244626443180182,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438014,
						"acc_stderr,none": 0.03780019230438014,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3619047619047619,
						"acc_norm,none": 0.3619047619047619,
						"acc_norm_stderr,none": 0.0471219474848361,
						"acc_stderr,none": 0.0471219474848361,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.30857142857142855,
						"acc_norm,none": 0.30857142857142855,
						"acc_norm_stderr,none": 0.035016835199101176,
						"acc_stderr,none": 0.035016835199101176,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846655,
						"acc_stderr,none": 0.030469670650846655,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.27393617021276595,
						"acc_norm,none": 0.27393617021276595,
						"acc_norm_stderr,none": 0.02303013358548191,
						"acc_stderr,none": 0.02303013358548191,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0312732353098133,
						"acc_stderr,none": 0.0312732353098133,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3505747126436782,
						"acc_norm,none": 0.3505747126436782,
						"acc_norm_stderr,none": 0.03627703962615276,
						"acc_stderr,none": 0.03627703962615276,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800254,
						"acc_stderr,none": 0.03885004245800254,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.34513274336283184,
						"acc_norm,none": 0.34513274336283184,
						"acc_norm_stderr,none": 0.031694102698674474,
						"acc_stderr,none": 0.031694102698674474,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.03756335775187897,
						"acc_stderr,none": 0.03756335775187897,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.34911242603550297,
						"acc_norm,none": 0.34911242603550297,
						"acc_norm_stderr,none": 0.03677739827593944,
						"acc_stderr,none": 0.03677739827593944,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.37267080745341613,
						"acc_norm,none": 0.37267080745341613,
						"acc_norm_stderr,none": 0.03822525970525206,
						"acc_stderr,none": 0.03822525970525206,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03782614981812041,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2538184101420976,
						"mcc_stderr,none": 0.031547917341415904
					},
					"copa": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4583146988670244,
						"likelihood_diff_stderr,none": 0.5203507335620096,
						"pct_stereotype,none": 0.6138938580799046,
						"pct_stereotype_stderr,none": 0.07118790395039078
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.7183214072748956,
						"likelihood_diff_stderr,none": 0.08741831497937609,
						"pct_stereotype,none": 0.6440071556350626,
						"pct_stereotype_stderr,none": 0.011695774156934206
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.072802197802198,
						"likelihood_diff_stderr,none": 0.3889894958639606,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.0,
						"likelihood_diff_stderr,none": 1.817215652384914,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.961538461538462,
						"likelihood_diff_stderr,none": 0.6039292109708765,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.68671875,
						"likelihood_diff_stderr,none": 0.1649897004090283,
						"pct_stereotype,none": 0.63125,
						"pct_stereotype_stderr,none": 0.027012909806946844
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.596064814814815,
						"likelihood_diff_stderr,none": 0.2446554206895788,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8940972222222223,
						"likelihood_diff_stderr,none": 0.31591115282427845,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5533956692913384,
						"likelihood_diff_stderr,none": 0.15315893627620175,
						"pct_stereotype,none": 0.5570866141732284,
						"pct_stereotype_stderr,none": 0.022060572810922933
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.68018018018018,
						"likelihood_diff_stderr,none": 0.3416574953000578,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.096774193548387,
						"likelihood_diff_stderr,none": 0.43931047382207766,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.255263157894737,
						"likelihood_diff_stderr,none": 0.24414052194171124,
						"pct_stereotype,none": 0.6842105263157895,
						"pct_stereotype_stderr,none": 0.03381137233892748
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1990533691115086,
						"likelihood_diff_stderr,none": 0.0742760681016346,
						"pct_stereotype,none": 0.5837805605247466,
						"pct_stereotype_stderr,none": 0.012040623801379574
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3305555555555557,
						"likelihood_diff_stderr,none": 0.3181423279787101,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05267171812666418
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8846153846153846,
						"likelihood_diff_stderr,none": 0.833308678136259,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.821969696969697,
						"likelihood_diff_stderr,none": 0.43276921608574137,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.720404984423676,
						"likelihood_diff_stderr,none": 0.12841305558298624,
						"pct_stereotype,none": 0.5950155763239875,
						"pct_stereotype_stderr,none": 0.0274415310396384
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.2885375494071147,
						"likelihood_diff_stderr,none": 0.19417843127792722,
						"pct_stereotype,none": 0.42292490118577075,
						"pct_stereotype_stderr,none": 0.031120568731718617
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4756944444444446,
						"likelihood_diff_stderr,none": 0.43996371250942284,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8453804347826086,
						"likelihood_diff_stderr,none": 0.14214320347033857,
						"pct_stereotype,none": 0.4934782608695652,
						"pct_stereotype_stderr,none": 0.023336016041798566
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.391304347826087,
						"likelihood_diff_stderr,none": 0.27278213259820144,
						"pct_stereotype,none": 0.7652173913043478,
						"pct_stereotype_stderr,none": 0.039698395317531235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.598901098901099,
						"likelihood_diff_stderr,none": 0.31275488069418356,
						"pct_stereotype,none": 0.8351648351648352,
						"pct_stereotype_stderr,none": 0.03911017674736743
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6910076530612246,
						"likelihood_diff_stderr,none": 0.24465070746146472,
						"pct_stereotype,none": 0.6989795918367347,
						"pct_stereotype_stderr,none": 0.032848301055273386
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15354330708661418,
						"exact_match_stderr,none": 0.007999500228589217
					},
					"glue": {
						"acc,none": 0.7463673177703668,
						"acc_stderr,none": 0.004546647903474537,
						"alias": "glue",
						"f1,none": 0.719659290906775,
						"f1_stderr,none": 0.00011410345825277068,
						"mcc,none": 0.2538184101420976,
						"mcc_stderr,none": 0.031547917341415904
					},
					"hellaswag": {
						"acc,none": 0.5403306114319857,
						"acc_norm,none": 0.7267476598287194,
						"acc_norm_stderr,none": 0.004447185883327442,
						"acc_stderr,none": 0.004973522582431211,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2601501588218308,
						"acc_norm,none": 0.2601501588218308,
						"acc_norm_stderr,none": 0.022136135990927036,
						"acc_stderr,none": 0.022136135990927036,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872818,
						"acc_stderr,none": 0.013790038620872818,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259738,
						"acc_stderr,none": 0.013929286594259738,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.2633333333333333,
						"acc_norm,none": 0.2633333333333333,
						"acc_norm_stderr,none": 0.017995959892029623,
						"acc_stderr,none": 0.017995959892029623,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895038,
						"acc_stderr,none": 0.013825416526895038,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632896,
						"acc_stderr,none": 0.014632638658632896,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01420569610409151,
						"acc_stderr,none": 0.01420569610409151,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.032328014206142654,
						"acc_stderr,none": 0.032328014206142654,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296188,
						"acc_stderr,none": 0.014341711358296188,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872833,
						"acc_stderr,none": 0.013790038620872833,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568193,
						"acc_stderr,none": 0.014029819522568193,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967223,
						"acc_stderr,none": 0.013473586661967223,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937596,
						"acc_stderr,none": 0.013493000446937596,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314125,
						"acc_stderr,none": 0.013644675781314125,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750641,
						"acc_stderr,none": 0.013626065817750641,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234183,
						"acc_stderr,none": 0.013807775152234183,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633916,
						"acc_stderr,none": 0.014046255632633916,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.01392928659425972,
						"acc_stderr,none": 0.01392928659425972,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462613,
						"acc_stderr,none": 0.014078856992462613,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796273,
						"acc_stderr,none": 0.013996674851796273,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515446,
						"acc_stderr,none": 0.013531522534515446,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555958,
						"acc_stderr,none": 0.013550631705555958,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.017572886612805254,
						"acc_stderr,none": 0.017572886612805254,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656604,
						"acc_stderr,none": 0.013842963108656604,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102135,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937591,
						"acc_stderr,none": 0.013493000446937591,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259727,
						"acc_stderr,none": 0.013929286594259727,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.02596627604487785,
						"acc_stderr,none": 0.02596627604487785,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796271,
						"acc_stderr,none": 0.013996674851796271,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.01404625563263391,
						"acc_stderr,none": 0.01404625563263391,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259727,
						"acc_stderr,none": 0.013929286594259727,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.03089738243291861,
						"acc_stderr,none": 0.03089738243291861,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281552,
						"acc_stderr,none": 0.013354937452281552,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234192,
						"acc_stderr,none": 0.013807775152234192,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.01456664639466438,
						"acc_stderr,none": 0.01456664639466438,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5259811444858583,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.0004942605210420811,
						"acc_stderr,none": 0.04159120557341156,
						"alias": "kobest",
						"f1,none": 0.43338500370588745,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5306267806267806,
						"acc_stderr,none": 0.013323701685038658,
						"alias": " - kobest_boolq",
						"f1,none": 0.40506600153936073,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.615,
						"acc_stderr,none": 0.015395194445410806,
						"alias": " - kobest_copa",
						"f1,none": 0.6137450576924428,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.436,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.022231970696321122,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.430801803942804,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5188916876574308,
						"acc_stderr,none": 0.025108004284191587,
						"alias": " - kobest_sentineg",
						"f1,none": 0.41695308835628553,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7173491170192121,
						"acc_stderr,none": 0.018142177898487488,
						"alias": "lambada",
						"perplexity,none": 3.584861786870358,
						"perplexity_stderr,none": 0.19127485129939906
					},
					"lambada_cloze": {
						"acc,none": 0.08732777023093344,
						"acc_stderr,none": 0.020745780851552195,
						"alias": "lambada_cloze",
						"perplexity,none": 254.72568437865908,
						"perplexity_stderr,none": 9.826644834692615
					},
					"lambada_multilingual": {
						"acc,none": 0.5392198719192703,
						"acc_stderr,none": 0.08651953559796134,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.73213239504858,
						"perplexity_stderr,none": 8.160976080984481
					},
					"lambada_openai": {
						"acc,none": 0.7500485154279061,
						"acc_stderr,none": 0.006032323323255978,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2295992530296496,
						"perplexity_stderr,none": 0.06254342655595974
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04657481078983117,
						"acc_stderr,none": 0.0029358301977242167,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 242.92156293032266,
						"perplexity_stderr,none": 7.3475198162731745
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4267417038618281,
						"acc_stderr,none": 0.006890802308382395,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.328602777848204,
						"perplexity_stderr,none": 1.9082523213156408
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7512128856976519,
						"acc_stderr,none": 0.006022926018315454,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2301704009243726,
						"perplexity_stderr,none": 0.06244716297918055
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4574034542984669,
						"acc_stderr,none": 0.0069406525668713895,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.16781777540783,
						"perplexity_stderr,none": 1.383703340457666
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5472540267805162,
						"acc_stderr,none": 0.006934798617263731,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.373666172845848,
						"perplexity_stderr,none": 0.7928781851954997
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5134872889578886,
						"acc_stderr,none": 0.0069634428763276955,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.560404848216645,
						"perplexity_stderr,none": 1.1415464779282023
					},
					"lambada_standard": {
						"acc,none": 0.6832912866291481,
						"acc_stderr,none": 0.0064810447492287294,
						"alias": " - lambada_standard",
						"perplexity,none": 3.939514668645153,
						"perplexity_stderr,none": 0.07985223784391436
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.1280807296720357,
						"acc_stderr,none": 0.004655776323581049,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 266.5298058269955,
						"perplexity_stderr,none": 8.335252158670748
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3384223918575064,
						"exact_match_stderr,get-answer": 0.011938007614742233
					},
					"logiqa": {
						"acc,none": 0.2411674347158218,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.01766158537036062,
						"acc_stderr,none": 0.016779369344911064,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25636132315521626,
						"acc_norm,none": 0.28498727735368956,
						"acc_norm_stderr,none": 0.01138889341093061,
						"acc_stderr,none": 0.011015878683092594,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2576214405360134,
						"acc_norm,none": 0.26800670016750416,
						"acc_norm_stderr,none": 0.008108246985625399,
						"acc_stderr,none": 0.00800579201170255,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5098496081338699,
						"acc_stderr,none": 0.005144897456294346,
						"alias": "mc_taco",
						"f1,none": 0.5604103343465046,
						"f1_stderr,none": 0.005823153340209776
					},
					"medmcqa": {
						"acc,none": 0.3397083432942864,
						"acc_norm,none": 0.3397083432942864,
						"acc_norm_stderr,none": 0.00732367388247889,
						"acc_stderr,none": 0.00732367388247889,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3652788688138256,
						"acc_norm,none": 0.3652788688138256,
						"acc_norm_stderr,none": 0.013500821568639267,
						"acc_stderr,none": 0.013500821568639267,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3999430280586811,
						"acc_stderr,none": 0.08635950811058715,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.40131578947368424,
						"acc_stderr,none": 0.039889037033362836,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.41509433962264153,
						"acc_stderr,none": 0.03032594578928611,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4027777777777778,
						"acc_stderr,none": 0.04101405519842425,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939098,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562427,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.34893617021276596,
						"acc_stderr,none": 0.031158522131357762,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.039994238792813365,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4206896551724138,
						"acc_stderr,none": 0.0411391498118926,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.023809523809523864,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3492063492063492,
						"acc_stderr,none": 0.04263906892795132,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45161290322580644,
						"acc_stderr,none": 0.02831050034856839,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.0319474007226554,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5393939393939394,
						"acc_stderr,none": 0.03892207016552012,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.03547601494006937,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5492227979274611,
						"acc_stderr,none": 0.035909109522355265,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3564102564102564,
						"acc_stderr,none": 0.024283140529467305,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.32222222222222224,
						"acc_stderr,none": 0.028493465091028597,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3403361344537815,
						"acc_stderr,none": 0.030778057422931673,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5174311926605505,
						"acc_stderr,none": 0.021424291871853147,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.27314814814814814,
						"acc_stderr,none": 0.030388051301678116,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5049019607843137,
						"acc_stderr,none": 0.035091433756067845,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6118143459915611,
						"acc_stderr,none": 0.031722950043323296,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3811659192825112,
						"acc_stderr,none": 0.032596251184168264,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.043851623256015534,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.38257173219978746,
						"acc_stderr,none": 0.0918370668537284,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4462809917355372,
						"acc_stderr,none": 0.0453793517794788,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.048129173245368216,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.04246624336697624,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5145631067961165,
						"acc_stderr,none": 0.049486373240266356,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6367521367521367,
						"acc_stderr,none": 0.03150712523091265,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.561941251596424,
						"acc_stderr,none": 0.017742232238257237,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3988439306358382,
						"acc_stderr,none": 0.02636243757454654,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22346368715083798,
						"acc_stderr,none": 0.013932068638579759,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.42483660130718953,
						"acc_stderr,none": 0.028304576673141124,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.44448020598648214,
						"acc_stderr,none": 0.08525395601423287,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4983922829581994,
						"acc_stderr,none": 0.02839794490780661,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4351851851851852,
						"acc_stderr,none": 0.027586006221607718,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3049645390070922,
						"acc_stderr,none": 0.02746470844202213,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3350717079530639,
						"acc_stderr,none": 0.01205549947133038,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3639705882352941,
						"acc_stderr,none": 0.029227192460032025,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4084967320261438,
						"acc_stderr,none": 0.019886221037501865,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.39183673469387753,
						"acc_stderr,none": 0.03125127591089165,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.43841403964900877,
						"acc_stderr,none": 0.07527032506953982,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5671641791044776,
						"acc_stderr,none": 0.0350349092367328,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34443387250237867,
						"acc_stderr,none": 0.06565641358073873,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6257309941520468,
						"acc_stderr,none": 0.03711601185389482,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7835965359144167,
						"acc_stderr,none": 0.004156763645459123,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7901749389747762,
						"acc_stderr,none": 0.0041066856133362434,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": "mrpc",
						"f1,none": 0.8304821150855366,
						"f1_stderr,none": 0.0160357783151771
					},
					"multimedqa": {
						"acc,none": 0.3762952448545067,
						"acc_norm,none": 0.3487615198903956,
						"acc_norm_stderr,none": 0.00012208400286875805,
						"acc_stderr,none": 0.08315819899393075,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5245462046204621,
						"acc_stderr,none": 0.0071731435804545576,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7056997758809385,
						"mrr_stderr,none": 0.01037786523412564,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.4074492099322799,
						"r@2_stderr,none": 0.01651687550847704
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6567908217352615,
						"mrr_stderr,none": 0.010445041361499404,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.46613995485327314,
						"r@2_stderr,none": 0.01676873258411582
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020553269174209188,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4355,
						"acc_stderr,none": 0.011089696374691106,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.010856285251628968,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.426,
						"acc_stderr,none": 0.011059980179945498,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943093,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5195,
						"acc_stderr,none": 0.011174628009718161,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48678571428571427,
						"acc_stderr,none": 0.05510670038912275,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7720348204570185,
						"acc_norm,none": 0.7840043525571273,
						"acc_norm_stderr,none": 0.00960123630355355,
						"acc_stderr,none": 0.009788093832324912,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24343509820666098,
						"acc_norm,none": 0.2846925704526046,
						"acc_norm_stderr,none": 0.0032969137518679705,
						"acc_stderr,none": 0.003135363104499412,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.0206670329874661,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7383168260083635,
						"acc_norm,none": 0.6453518655532496,
						"acc_norm_stderr,none": 0.01042876634587923,
						"acc_stderr,none": 0.15753668898943451,
						"alias": "pythia",
						"bits_per_byte,none": 0.6338882691646343,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517415278558557,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2295992530296496,
						"perplexity_stderr,none": 0.06254342655595974,
						"word_perplexity,none": 10.480867512464803,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.40425531914893614,
						"acc_norm,none": 0.450354609929078,
						"acc_norm_stderr,none": 0.06767988691348358,
						"acc_stderr,none": 0.04923318221742833,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.475,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.04577759534198058,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.45625,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4084507042253521,
						"acc_norm,none": 0.3908450704225352,
						"acc_norm_stderr,none": 0.029005007569909827,
						"acc_stderr,none": 0.029219452741745366,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877446,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.758867177838239,
						"acc_stderr,none": 0.002127475412249373,
						"alias": "qqp",
						"f1,none": 0.7188950722297511,
						"f1_stderr,none": 0.0027277932444339558
					},
					"race": {
						"acc,none": 0.34258373205741627,
						"acc_stderr,none": 0.01468768473714516,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2687,
						"em_stderr,none": 0.00443305720689666,
						"f1,none": 0.27826380976438525,
						"f1_stderr,none": 0.004444239683295442
					},
					"rte": {
						"acc,none": 0.6714801444043321,
						"acc_stderr,none": 0.028271109855219828,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.958,
						"acc_norm,none": 0.959,
						"acc_norm_stderr,none": 0.006273624021118796,
						"acc_stderr,none": 0.006346359293033853,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6678700361010831,
						"acc_stderr,none": 0.028349504186256848,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8967889908256881,
						"acc_stderr,none": 0.010308585297584697,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5804258722383285,
						"acc_norm,none": 0.7709187243826852,
						"acc_norm_stderr,none": 0.0029711858327324285,
						"acc_stderr,none": 0.0034890601268237173,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6968819673222189,
						"acc_stderr,none": 0.08096309710060864,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6107772435897436,
						"acc_stderr,none": 0.004879889638010203,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.910915171784737,
						"acc_stderr,none": 0.0028679414818845944,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5741176470588235,
						"acc_stderr,none": 0.004896282583251982,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32727737724820793,
						"acc_stderr,none": 0.0015362538719523623,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241473,
						"bleu_diff,none": -6.512966303026584,
						"bleu_diff_stderr,none": 0.8728353518103906,
						"bleu_max,none": 26.937982387212646,
						"bleu_max_stderr,none": 0.8169320733721972,
						"rouge1_acc,none": 0.2962056303549572,
						"rouge1_acc_stderr,none": 0.015983595101811392,
						"rouge1_diff,none": -8.734657558997394,
						"rouge1_diff_stderr,none": 0.952866418147229,
						"rouge1_max,none": 52.000317912608054,
						"rouge1_max_stderr,none": 0.888081115007632,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.01559475363200653,
						"rouge2_diff,none": -10.380477320788014,
						"rouge2_diff_stderr,none": 1.1431126685043758,
						"rouge2_max,none": 35.827617650407035,
						"rouge2_max_stderr,none": 1.0377065231920681,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768542,
						"rougeL_diff,none": -9.10458901595442,
						"rougeL_diff_stderr,none": 0.9667154591379234,
						"rougeL_max,none": 49.15763342945929,
						"rougeL_max_stderr,none": 0.9012952883675601
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241473,
						"bleu_diff,none": -6.512966303026584,
						"bleu_diff_stderr,none": 0.8728353518103906,
						"bleu_max,none": 26.937982387212646,
						"bleu_max_stderr,none": 0.8169320733721972,
						"rouge1_acc,none": 0.2962056303549572,
						"rouge1_acc_stderr,none": 0.015983595101811392,
						"rouge1_diff,none": -8.734657558997394,
						"rouge1_diff_stderr,none": 0.952866418147229,
						"rouge1_max,none": 52.000317912608054,
						"rouge1_max_stderr,none": 0.888081115007632,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.01559475363200653,
						"rouge2_diff,none": -10.380477320788014,
						"rouge2_diff_stderr,none": 1.1431126685043758,
						"rouge2_max,none": 35.827617650407035,
						"rouge2_max_stderr,none": 1.0377065231920681,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768542,
						"rougeL_diff,none": -9.10458901595442,
						"rougeL_diff_stderr,none": 0.9667154591379234,
						"rougeL_max,none": 49.15763342945929,
						"rougeL_max_stderr,none": 0.9012952883675601
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156474,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39996479121612216,
						"acc_stderr,none": 0.014030568344042541,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.15600393700787402,
						"exact_match_stderr,none": 0.00805161551373712
					},
					"wic": {
						"acc,none": 0.5783699059561128,
						"acc_stderr,none": 0.019565859392130992,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6338972124570468,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517511471592103,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.481214947896186,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7182320441988951,
						"acc_stderr,none": 0.012643326011852944,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.41346153846153844,
						"acc_stderr,none": 0.04852294969729052,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8681318681318682,
						"acc_stderr,none": 0.020515321360773595,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6238181818181818,
						"acc_stderr,none": 0.07040862339150304,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.02194892960993861,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502843,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177514,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290785,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43585006693440426,
						"acc_stderr,none": 0.0509123654087118,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512708,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4807228915662651,
						"acc_stderr,none": 0.010014621554188637,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4863453815261044,
						"acc_stderr,none": 0.01001833496714855,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3827309236947791,
						"acc_stderr,none": 0.009742526340884055,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5289156626506024,
						"acc_stderr,none": 0.010005299609236084,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5024096385542168,
						"acc_stderr,none": 0.010021956483068094,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4967871485943775,
						"acc_stderr,none": 0.010021865961119555,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41325301204819276,
						"acc_stderr,none": 0.009870087435623783,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.48032128514056227,
						"acc_stderr,none": 0.010014307727112709,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.009806220246670024,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.00986436082175034,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45220883534136547,
						"acc_stderr,none": 0.00997618708680372,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.009878474341822936,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467441,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495745,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6311894591179833,
						"acc_stderr,none": 0.0625903705087539,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570944,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.786234281932495,
						"acc_stderr,none": 0.01055009920692158,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7160820648577101,
						"acc_stderr,none": 0.011603508867763129,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.57114493712773,
						"acc_stderr,none": 0.01273620271314777,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.0126161145269279,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6657842488418266,
						"acc_stderr,none": 0.012139246810918223,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245275,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840939,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5559232296492389,
						"acc_stderr,none": 0.012786390539820834,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5929847782925215,
						"acc_stderr,none": 0.012642664836816931,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399374,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8195100022476961,
						"acc_stderr,none": 0.045857811074837516,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8769892473118279,
						"acc_stderr,none": 0.006813191726515801,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.051219942106581456,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7591240875912408,
						"acc_stderr,none": 0.013815618043006166,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.0254545042911426,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.025899880794833657,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7857142857142857,
						"acc_stderr,none": 0.01829552775577619,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C0-rwkv-70_pth"
	},
	"./rwkv-x-dev/1_3-C1-rwkv-190_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.641206313416009,
						"acc_norm,none": 0.6420518602029313,
						"acc_norm_stderr,none": 0.08959458082391243,
						"acc_stderr,none": 0.1071249724648545,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4840625,
						"acc_stderr,none": 0.051436171869769835,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0733,
						"acc_stderr,none": 0.051768802018898344,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8083134328358208,
						"acc_stderr,none": 0.16512647975167588,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27488855869242196,
						"acc_norm,none": 0.27488855869242196,
						"acc_norm_stderr,none": 0.12255744543819905,
						"acc_stderr,none": 0.12255744543819905,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2952857882921776,
						"acc_norm,none": 0.2952857882921776,
						"acc_norm_stderr,none": 0.05148212485556968,
						"acc_stderr,none": 0.05148212485556968,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5072953935599283,
						"likelihood_diff_stderr,none": 0.4757083158174957,
						"pct_stereotype,none": 0.6082289803220036,
						"pct_stereotype_stderr,none": 0.07144784495390717
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.09547244094488189,
						"exact_match_stderr,none": 0.006520711009195213
					},
					"glue": {
						"acc,none": 0.7348502124024592,
						"acc_stderr,none": 0.0468744128770656,
						"alias": "glue",
						"f1,none": 0.7040082090049717,
						"f1_stderr,none": 0.00013342827799553273,
						"mcc,none": 0.184332205590133,
						"mcc_stderr,none": 0.0009419849462692373
					},
					"kmmlu": {
						"acc,none": 0.2808836269130812,
						"acc_norm,none": 0.2808836269130812,
						"acc_norm_stderr,none": 0.02674060160636413,
						"acc_stderr,none": 0.02674060160636413,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5172111379083534,
						"acc_norm,none": 0.544,
						"acc_norm_stderr,none": 0.0004971222444889771,
						"acc_stderr,none": 0.04469108217861228,
						"alias": "kobest",
						"f1,none": 0.41012434242159046,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7113332039588589,
						"acc_stderr,none": 0.01779269544408024,
						"alias": "lambada",
						"perplexity,none": 3.660483559100401,
						"perplexity_stderr,none": 0.19494371140569863
					},
					"lambada_cloze": {
						"acc,none": 0.03745391034348923,
						"acc_stderr,none": 0.004776260291674254,
						"alias": "lambada_cloze",
						"perplexity,none": 477.9198437282575,
						"perplexity_stderr,none": 87.01131983834603
					},
					"lambada_multilingual": {
						"acc,none": 0.5352222006598097,
						"acc_stderr,none": 0.08140089683691314,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.45243675733278,
						"perplexity_stderr,none": 8.33025760334343
					},
					"mmlu": {
						"acc,none": 0.3844893889759294,
						"acc_stderr,none": 0.08040959032165233,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3657810839532412,
						"acc_stderr,none": 0.07886014679203526,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4419053749597683,
						"acc_stderr,none": 0.08132507964777667,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4111147221319467,
						"acc_stderr,none": 0.06654014082338813,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3298445924516334,
						"acc_stderr,none": 0.06859159843243573,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.37899219304471254,
						"acc_norm,none": 0.3440985335075257,
						"acc_norm_stderr,none": 9.877461320424378e-05,
						"acc_stderr,none": 0.08870456147105794,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.48714285714285716,
						"acc_stderr,none": 0.05137672671209161,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7308643079843239,
						"acc_norm,none": 0.6459717933934752,
						"acc_norm_stderr,none": 0.010083318731700059,
						"acc_stderr,none": 0.15474716422240425,
						"alias": "pythia",
						"bits_per_byte,none": 0.6344802033311808,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523783341543202,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3008116551552837,
						"perplexity_stderr,none": 0.06414249881834119,
						"word_perplexity,none": 10.503888258986443,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.05925060811761666,
						"acc_stderr,none": 0.04612403510483104,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7029716149213004,
						"acc_stderr,none": 0.07546280608960973,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32571179875001505,
						"acc_stderr,none": 0.0013953369872561267,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.559174160339107,
						"bleu_diff_stderr,none": 0.8634360451460346,
						"bleu_max,none": 26.72643529006475,
						"bleu_max_stderr,none": 0.8143973045212926,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219357,
						"rouge1_diff,none": -9.973904804411875,
						"rouge1_diff_stderr,none": 0.931770222940795,
						"rouge1_max,none": 51.683375699575556,
						"rouge1_max_stderr,none": 0.8848460189809442,
						"rouge2_acc,none": 0.2521419828641371,
						"rouge2_acc_stderr,none": 0.015201522246299969,
						"rouge2_diff,none": -11.726788386174697,
						"rouge2_diff_stderr,none": 1.1297957792834203,
						"rouge2_max,none": 35.57191977539582,
						"rouge2_max_stderr,none": 1.039100559069391,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.319052971320211,
						"rougeL_diff_stderr,none": 0.9485641276203709,
						"rougeL_max,none": 48.85601562983841,
						"rougeL_max_stderr,none": 0.904288267562164
					},
					"xcopa": {
						"acc,none": 0.6192727272727273,
						"acc_stderr,none": 0.06832770339193,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43502008032128514,
						"acc_stderr,none": 0.05228323874212457,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6257746224655556,
						"acc_stderr,none": 0.06171346989644983,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8204090806922903,
						"acc_stderr,none": 0.035584246762786625,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.641206313416009,
						"acc_norm,none": 0.6420518602029313,
						"acc_norm_stderr,none": 0.08959458082391243,
						"acc_stderr,none": 0.1071249724648545,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4840625,
						"acc_stderr,none": 0.051436171869769835,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921665,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.441,
						"acc_stderr,none": 0.01570877989424268,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.43166666666666664,
						"acc_stderr,none": 0.01430428747484826,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41467576791808874,
						"acc_norm,none": 0.45307167235494883,
						"acc_norm_stderr,none": 0.014546892052005631,
						"acc_stderr,none": 0.014397070564409174,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7529461279461279,
						"acc_norm,none": 0.7352693602693603,
						"acc_norm_stderr,none": 0.009053021086173967,
						"acc_stderr,none": 0.008850055161459236,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0733,
						"acc_stderr,none": 0.051768802018898344,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0515,
						"acc_stderr,none": 0.00494328767588157,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.2045,
						"acc_stderr,none": 0.009021117740205037,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0935,
						"acc_stderr,none": 0.006511534000335063,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0325,
						"acc_stderr,none": 0.00396607360873882,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.015,
						"acc_stderr,none": 0.0027186753387999554,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.126,
						"acc_stderr,none": 0.007422233139182952,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315764,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.129,
						"acc_stderr,none": 0.007497173054018469,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521609,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0745,
						"acc_stderr,none": 0.005872999324070262,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005639913232104121,
						"acc_stderr,none": 0.0015601516534579525,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8083134328358208,
						"acc_stderr,none": 0.16512647975167588,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653866,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910625,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298689004,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595282,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.015719768163402086,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992441,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030084,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244054,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817153,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246443,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118585,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656795,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756986,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932575,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087964,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895038,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286411,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.01020686926438178,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178344,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.015537226438634597,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662761,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.0122851913263867,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.015654426245029288,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445517,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855754,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244055,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400234,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.669,
						"acc_stderr,none": 0.014888272588203934,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340992,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.319,
						"acc_stderr,none": 0.01474640486547349,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.519,
						"acc_stderr,none": 0.01580787426850585,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597517,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.015743152379585526,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621231,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555965,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942303,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448795,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651537,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541677,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01578686875935901,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286426,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333322,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910611,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206491,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249935,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662735,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920676,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343968,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229863,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665549,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697071,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523722,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318222,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.01512017260548369,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.298,
						"acc_stderr,none": 0.014470846741134701,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7195718654434251,
						"acc_stderr,none": 0.007856704488285302,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8392857142857143,
						"acc_stderr,none": 0.049522300593062965,
						"alias": "cb",
						"f1,none": 0.6750864689235928,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27488855869242196,
						"acc_norm,none": 0.27488855869242196,
						"acc_norm_stderr,none": 0.12255744543819905,
						"acc_stderr,none": 0.12255744543819905,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2952857882921776,
						"acc_norm,none": 0.2952857882921776,
						"acc_norm_stderr,none": 0.05148212485556968,
						"acc_stderr,none": 0.05148212485556968,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.03315925698294869,
						"acc_stderr,none": 0.03315925698294869,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847836,
						"acc_stderr,none": 0.03915345408847836,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.33088235294117646,
						"acc_norm,none": 0.33088235294117646,
						"acc_norm_stderr,none": 0.04049684225945662,
						"acc_stderr,none": 0.04049684225945662,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.33436532507739936,
						"acc_norm,none": 0.33436532507739936,
						"acc_norm_stderr,none": 0.026290609195557965,
						"acc_stderr,none": 0.026290609195557965,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.031980016601150726,
						"acc_stderr,none": 0.031980016601150726,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.35195530726256985,
						"acc_norm,none": 0.35195530726256985,
						"acc_norm_stderr,none": 0.03579614323524846,
						"acc_stderr,none": 0.03579614323524846,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.0423247353205504,
						"acc_stderr,none": 0.0423247353205504,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.026718814072967535,
						"acc_stderr,none": 0.026718814072967535,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.27485380116959063,
						"acc_norm,none": 0.27485380116959063,
						"acc_norm_stderr,none": 0.034240429246915824,
						"acc_stderr,none": 0.034240429246915824,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0373874230421581,
						"acc_stderr,none": 0.0373874230421581,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.03652215878407506,
						"acc_stderr,none": 0.03652215878407506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935573,
						"acc_stderr,none": 0.03714908409935573,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.034078261673374376,
						"acc_stderr,none": 0.034078261673374376,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.028746730632681374,
						"acc_stderr,none": 0.028746730632681374,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.031156269519646847,
						"acc_stderr,none": 0.031156269519646847,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933942,
						"acc_stderr,none": 0.028187385293933942,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03448901746724545,
						"acc_stderr,none": 0.03448901746724545,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698769,
						"acc_stderr,none": 0.03724517629698769,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.041100705493392085,
						"acc_stderr,none": 0.041100705493392085,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.034450002891734596,
						"acc_stderr,none": 0.034450002891734596,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600103,
						"acc_stderr,none": 0.03873144730600103,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0404061017820884,
						"acc_stderr,none": 0.0404061017820884,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871638,
						"acc_stderr,none": 0.03615263198871638,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.021989272196105043,
						"acc_stderr,none": 0.021989272196105043,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.03315078506050491,
						"acc_stderr,none": 0.03315078506050491,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208568,
						"acc_stderr,none": 0.04119323030208568,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.032367278954043524,
						"acc_stderr,none": 0.032367278954043524,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.034251778896020865,
						"acc_stderr,none": 0.034251778896020865,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.037245636197746325,
						"acc_stderr,none": 0.037245636197746325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.03098255553570088,
						"acc_stderr,none": 0.03098255553570088,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897235,
						"acc_stderr,none": 0.03599172203897235,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493037,
						"acc_stderr,none": 0.030945344741493037,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03681050869161551,
						"acc_stderr,none": 0.03681050869161551,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278585,
						"acc_stderr,none": 0.03637652289278585,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.03714454174077367,
						"acc_stderr,none": 0.03714454174077367,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.184332205590133,
						"mcc_stderr,none": 0.030691773266939747
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5072953935599283,
						"likelihood_diff_stderr,none": 0.4757083158174957,
						"pct_stereotype,none": 0.6082289803220036,
						"pct_stereotype_stderr,none": 0.07144784495390717
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.673598688133572,
						"likelihood_diff_stderr,none": 0.08686262734295087,
						"pct_stereotype,none": 0.6386404293381037,
						"pct_stereotype_stderr,none": 0.011734402417305046
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.2239010989010985,
						"likelihood_diff_stderr,none": 0.39220073323979493,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.049117041148312765
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.863636363636363,
						"likelihood_diff_stderr,none": 1.8088681909501876,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.857692307692307,
						"likelihood_diff_stderr,none": 0.6339496070587478,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.65234375,
						"likelihood_diff_stderr,none": 0.15974475723056403,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.02681271031002423
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5966435185185186,
						"likelihood_diff_stderr,none": 0.23779411688067553,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252336
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8055555555555554,
						"likelihood_diff_stderr,none": 0.32295584050672566,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513983
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4712106299212597,
						"likelihood_diff_stderr,none": 0.15028877994009315,
						"pct_stereotype,none": 0.5452755905511811,
						"pct_stereotype_stderr,none": 0.02211455387069532
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6171171171171173,
						"likelihood_diff_stderr,none": 0.34503275580444254,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.042343213610845386
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.008064516129032,
						"likelihood_diff_stderr,none": 0.44856485759007997,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.21578947368421,
						"likelihood_diff_stderr,none": 0.24841092750724766,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.034104864353344894
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3411970781156826,
						"likelihood_diff_stderr,none": 0.07675626704013626,
						"pct_stereotype,none": 0.5778175313059034,
						"pct_stereotype_stderr,none": 0.01206447555010972
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.4055555555555554,
						"likelihood_diff_stderr,none": 0.32331812432321666,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.5576923076923075,
						"likelihood_diff_stderr,none": 0.7026471502478888,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.8106060606060606,
						"likelihood_diff_stderr,none": 0.42218009385228294,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.05615974350262315
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8243769470404985,
						"likelihood_diff_stderr,none": 0.12868128233726245,
						"pct_stereotype,none": 0.5638629283489096,
						"pct_stereotype_stderr,none": 0.027721918583423183
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.594367588932806,
						"likelihood_diff_stderr,none": 0.212217235421508,
						"pct_stereotype,none": 0.4150197628458498,
						"pct_stereotype_stderr,none": 0.031038785215783238
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6180555555555554,
						"likelihood_diff_stderr,none": 0.4782017796428157,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.9152173913043478,
						"likelihood_diff_stderr,none": 0.13908528560792635,
						"pct_stereotype,none": 0.5108695652173914,
						"pct_stereotype_stderr,none": 0.023332486098156545
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.514130434782609,
						"likelihood_diff_stderr,none": 0.2873167725149044,
						"pct_stereotype,none": 0.7565217391304347,
						"pct_stereotype_stderr,none": 0.04019651260878071
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.8873626373626373,
						"likelihood_diff_stderr,none": 0.3464512163395744,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497395
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.926658163265306,
						"likelihood_diff_stderr,none": 0.25929975724072796,
						"pct_stereotype,none": 0.7244897959183674,
						"pct_stereotype_stderr,none": 0.03199393624667903
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.09547244094488189,
						"exact_match_stderr,none": 0.006520711009195213
					},
					"glue": {
						"acc,none": 0.7348502124024592,
						"acc_stderr,none": 0.0468744128770656,
						"alias": "glue",
						"f1,none": 0.7040082090049717,
						"f1_stderr,none": 0.00013342827799553273,
						"mcc,none": 0.184332205590133,
						"mcc_stderr,none": 0.0009419849462692373
					},
					"hellaswag": {
						"acc,none": 0.5360485958972316,
						"acc_norm,none": 0.7257518422624976,
						"acc_norm_stderr,none": 0.004452228541043546,
						"acc_stderr,none": 0.004976796060456436,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2808836269130812,
						"acc_norm,none": 0.2808836269130812,
						"acc_norm_stderr,none": 0.02674060160636413,
						"acc_stderr,none": 0.02674060160636413,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231563,
						"acc_stderr,none": 0.014326941797231563,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729482,
						"acc_stderr,none": 0.014013292702729482,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25666666666666665,
						"acc_norm,none": 0.25666666666666665,
						"acc_norm_stderr,none": 0.017846913889347043,
						"acc_stderr,none": 0.017846913889347043,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031309,
						"acc_stderr,none": 0.014267009061031309,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.014965960710224485,
						"acc_stderr,none": 0.014965960710224485,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361425,
						"acc_stderr,none": 0.014498627873361425,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.03182868716477582,
						"acc_stderr,none": 0.03182868716477582,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535262,
						"acc_stderr,none": 0.014539683710535262,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445509,
						"acc_stderr,none": 0.014428554438445509,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.318,
						"acc_norm,none": 0.318,
						"acc_norm_stderr,none": 0.014734079309311901,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689502,
						"acc_stderr,none": 0.01382541652689502,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490528,
						"acc_stderr,none": 0.014127086556490528,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021343,
						"acc_stderr,none": 0.013912208651021343,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091515,
						"acc_stderr,none": 0.014205696104091515,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517908,
						"acc_stderr,none": 0.014297146862517908,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722695,
						"acc_stderr,none": 0.014645596385722695,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664392,
						"acc_stderr,none": 0.014566646394664392,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796263,
						"acc_stderr,none": 0.013996674851796263,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438676,
						"acc_stderr,none": 0.013434451402438676,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.01803238600153009,
						"acc_stderr,none": 0.01803238600153009,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145158,
						"acc_stderr,none": 0.013979965645145158,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.01423652621529135,
						"acc_stderr,none": 0.01423652621529135,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.01426700906103131,
						"acc_stderr,none": 0.01426700906103131,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377947,
						"acc_stderr,none": 0.014370995982377947,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.025574048533225632,
						"acc_stderr,none": 0.025574048533225632,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.01389503767796513,
						"acc_stderr,none": 0.01389503767796513,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361425,
						"acc_stderr,none": 0.014498627873361425,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031309,
						"acc_stderr,none": 0.014267009061031309,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.03109395714370027,
						"acc_stderr,none": 0.03109395714370027,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021354,
						"acc_stderr,none": 0.013912208651021354,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796273,
						"acc_stderr,none": 0.013996674851796273,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.339,
						"acc_norm,none": 0.339,
						"acc_norm_stderr,none": 0.014976758771620342,
						"acc_stderr,none": 0.014976758771620342,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5172111379083534,
						"acc_norm,none": 0.544,
						"acc_norm_stderr,none": 0.0004971222444889771,
						"acc_stderr,none": 0.04469108217861228,
						"alias": "kobest",
						"f1,none": 0.41012434242159046,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5035612535612536,
						"acc_stderr,none": 0.013348428901951046,
						"alias": " - kobest_boolq",
						"f1,none": 0.3387092305457101,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": " - kobest_copa",
						"f1,none": 0.6174817852054806,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.422,
						"acc_norm,none": 0.544,
						"acc_norm_stderr,none": 0.02229623834840705,
						"acc_stderr,none": 0.02210903931061855,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.41706352143838266,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.025102898696363056,
						"alias": " - kobest_sentineg",
						"f1,none": 0.39228166290686434,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7113332039588589,
						"acc_stderr,none": 0.01779269544408024,
						"alias": "lambada",
						"perplexity,none": 3.660483559100401,
						"perplexity_stderr,none": 0.19494371140569863
					},
					"lambada_cloze": {
						"acc,none": 0.03745391034348923,
						"acc_stderr,none": 0.004776260291674254,
						"alias": "lambada_cloze",
						"perplexity,none": 477.9198437282575,
						"perplexity_stderr,none": 87.01131983834603
					},
					"lambada_multilingual": {
						"acc,none": 0.5352222006598097,
						"acc_stderr,none": 0.08140089683691314,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.45243675733278,
						"perplexity_stderr,none": 8.33025760334343
					},
					"lambada_openai": {
						"acc,none": 0.7436444789443043,
						"acc_stderr,none": 0.0060829758093544775,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3008116551552837,
						"perplexity_stderr,none": 0.06414249881834119
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.029497380166893072,
						"acc_stderr,none": 0.0023572310419656068,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 306.7863677911609,
						"perplexity_stderr,none": 9.555652931674876
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4248010867455851,
						"acc_stderr,none": 0.006886743547830464,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.7142988717323,
						"perplexity_stderr,none": 1.992657890176096
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7448088492140501,
						"acc_stderr,none": 0.006073895168086378,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.299548219178941,
						"perplexity_stderr,none": 0.06425208321921382
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45429846691247816,
						"acc_stderr,none": 0.006936817923963303,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.046419162838276,
						"perplexity_stderr,none": 1.4238540019889632
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5427906074131574,
						"acc_stderr,none": 0.006940420862895475,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.775366160920893,
						"perplexity_stderr,none": 0.8187366687298445
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5094119930137784,
						"acc_stderr,none": 0.006964743377047839,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.42655137199348,
						"perplexity_stderr,none": 1.196853641944478
					},
					"lambada_standard": {
						"acc,none": 0.6780516204152921,
						"acc_stderr,none": 0.006509334262746002,
						"alias": " - lambada_standard",
						"perplexity,none": 4.021462189143243,
						"perplexity_stderr,none": 0.08207379424512996
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.04541044052008539,
						"acc_stderr,none": 0.0029006696462560086,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 649.053319665354,
						"perplexity_stderr,none": 20.147531425703995
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3276081424936387,
						"exact_match_stderr,get-answer": 0.011841329714669961
					},
					"logiqa": {
						"acc,none": 0.24731182795698925,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.017803862148538012,
						"acc_stderr,none": 0.01692284244671239,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25572519083969464,
						"acc_norm,none": 0.2926208651399491,
						"acc_norm_stderr,none": 0.011478646336639113,
						"acc_stderr,none": 0.011006907662987134,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25661641541038527,
						"acc_norm,none": 0.2606365159128978,
						"acc_norm_stderr,none": 0.008036134931668057,
						"acc_stderr,none": 0.007995567445627896,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4325354797712349,
						"acc_stderr,none": 0.005098838148230505,
						"alias": "mc_taco",
						"f1,none": 0.5348150720611218,
						"f1_stderr,none": 0.005641087796231545
					},
					"medmcqa": {
						"acc,none": 0.3435333492708582,
						"acc_norm,none": 0.3435333492708582,
						"acc_norm_stderr,none": 0.0073434268263329674,
						"acc_stderr,none": 0.0073434268263329674,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.34171249018067557,
						"acc_norm,none": 0.34171249018067557,
						"acc_norm_stderr,none": 0.01329825567352678,
						"acc_stderr,none": 0.01329825567352678,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3844893889759294,
						"acc_stderr,none": 0.08040959032165233,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4148148148148148,
						"acc_stderr,none": 0.04256193767901408,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3684210526315789,
						"acc_stderr,none": 0.03925523381052932,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39622641509433965,
						"acc_stderr,none": 0.030102793781791197,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4236111111111111,
						"acc_stderr,none": 0.04132125019723369,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411018,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3236994219653179,
						"acc_stderr,none": 0.035676037996391706,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.34893617021276596,
						"acc_stderr,none": 0.031158522131357773,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3724137931034483,
						"acc_stderr,none": 0.0402873153294756,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.02345603738398202,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04216370213557835,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.43548387096774194,
						"acc_stderr,none": 0.028206225591502737,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.503030303030303,
						"acc_stderr,none": 0.03904272341431857,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.42424242424242425,
						"acc_stderr,none": 0.03521224908841583,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5181347150259067,
						"acc_stderr,none": 0.0360606500183292,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3487179487179487,
						"acc_stderr,none": 0.02416278028401772,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.027634907264178544,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3445378151260504,
						"acc_stderr,none": 0.030868682604121626,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.47706422018348627,
						"acc_stderr,none": 0.0214147570581755,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.24537037037037038,
						"acc_stderr,none": 0.029346665094372948,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4950980392156863,
						"acc_stderr,none": 0.035091433756067866,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5147679324894515,
						"acc_stderr,none": 0.032533028078777386,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.43946188340807174,
						"acc_stderr,none": 0.03331092511038179,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.48091603053435117,
						"acc_stderr,none": 0.04382094705550988,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3657810839532412,
						"acc_stderr,none": 0.07886014679203526,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4214876033057851,
						"acc_stderr,none": 0.04507732278775094,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.047128212574267705,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.38650306748466257,
						"acc_stderr,none": 0.038258255488486076,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04547960999764376,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458935,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6068376068376068,
						"acc_stderr,none": 0.03199957924651047,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5747126436781609,
						"acc_stderr,none": 0.017679225489431453,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3439306358381503,
						"acc_stderr,none": 0.02557412378654666,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22569832402234638,
						"acc_stderr,none": 0.01398139505845505,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3790849673202614,
						"acc_stderr,none": 0.027780141207023334,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4419053749597683,
						"acc_stderr,none": 0.08132507964777667,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.48231511254019294,
						"acc_stderr,none": 0.02838032284907713,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.027648477877413327,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880582,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3285528031290743,
						"acc_stderr,none": 0.011996027247502912,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4264705882352941,
						"acc_stderr,none": 0.03004261583271486,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.380718954248366,
						"acc_stderr,none": 0.019643801557924806,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.04769300568972742,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3183673469387755,
						"acc_stderr,none": 0.02982253379398207,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4111147221319467,
						"acc_stderr,none": 0.06654014082338813,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5323383084577115,
						"acc_stderr,none": 0.035281314729336065,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3298445924516334,
						"acc_stderr,none": 0.06859159843243573,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5730994152046783,
						"acc_stderr,none": 0.03793620616529917,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7857361181864493,
						"acc_stderr,none": 0.0041418066019842,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7842758340113913,
						"acc_stderr,none": 0.00414844147806004,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7254901960784313,
						"acc_stderr,none": 0.022120630385010484,
						"alias": "mrpc",
						"f1,none": 0.8287461773700305,
						"f1_stderr,none": 0.01593740287464065
					},
					"multimedqa": {
						"acc,none": 0.37899219304471254,
						"acc_norm,none": 0.3440985335075257,
						"acc_norm_stderr,none": 9.877461320424378e-05,
						"acc_stderr,none": 0.08870456147105794,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.47174092409240925,
						"acc_stderr,none": 0.007170323509165312,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7023137713999027,
						"mrr_stderr,none": 0.010381389965412494,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41196388261851014,
						"r@2_stderr,none": 0.016544739619609432
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6539691513063676,
						"mrr_stderr,none": 0.010439050673320804,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.016773710557640358
					},
					"openbookqa": {
						"acc,none": 0.296,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982786,
						"acc_stderr,none": 0.020435342091896146,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4455,
						"acc_stderr,none": 0.011116504096687399,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.01083947633068883,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.011112774040420282,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.01112930504188632,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.011147292544180015,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48714285714285716,
						"acc_stderr,none": 0.05137672671209161,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7763873775843307,
						"acc_norm,none": 0.7861806311207835,
						"acc_norm_stderr,none": 0.009565994206915597,
						"acc_stderr,none": 0.009721489519176297,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24685098206660974,
						"acc_norm,none": 0.29253842869342445,
						"acc_norm_stderr,none": 0.0033236557924346422,
						"acc_stderr,none": 0.003150148578593876,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.02031317923174519,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7308643079843239,
						"acc_norm,none": 0.6459717933934752,
						"acc_norm_stderr,none": 0.010083318731700059,
						"acc_stderr,none": 0.15474716422240425,
						"alias": "pythia",
						"bits_per_byte,none": 0.6344802033311808,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523783341543202,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3008116551552837,
						"perplexity_stderr,none": 0.06414249881834119,
						"word_perplexity,none": 10.503888258986443,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.05925060811761666,
						"acc_stderr,none": 0.04612403510483104,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5583333333333333,
						"acc_norm_stderr,none": 0.045521924002535574,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978144,
						"acc_stderr,none": 0.037826149818120415,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.3908450704225352,
						"acc_norm_stderr,none": 0.02900500756990982,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7475389562206283,
						"acc_stderr,none": 0.002160566320808598,
						"alias": "qqp",
						"f1,none": 0.7023590820284022,
						"f1_stderr,none": 0.002814250421497814
					},
					"race": {
						"acc,none": 0.3406698564593301,
						"acc_stderr,none": 0.014667904380876565,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2722,
						"em_stderr,none": 0.004451145613179883,
						"f1,none": 0.28175857167243956,
						"f1_stderr,none": 0.004461809059583712
					},
					"rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.02785041039263069,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.957,
						"acc_norm,none": 0.957,
						"acc_norm_stderr,none": 0.006418114379799741,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.02785041039263069,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8795871559633027,
						"acc_stderr,none": 0.011027238534594332,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5783764870538838,
						"acc_norm,none": 0.7692692192342298,
						"acc_norm_stderr,none": 0.0029786718948360937,
						"acc_stderr,none": 0.0034913906773147664,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7029716149213004,
						"acc_stderr,none": 0.07546280608960973,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6393229166666666,
						"acc_stderr,none": 0.004806056532555753,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9038208168642952,
						"acc_stderr,none": 0.0029683229948045766,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5709803921568627,
						"acc_stderr,none": 0.004900838285087907,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32571179875001505,
						"acc_stderr,none": 0.0013953369872561267,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.559174160339107,
						"bleu_diff_stderr,none": 0.8634360451460346,
						"bleu_max,none": 26.72643529006475,
						"bleu_max_stderr,none": 0.8143973045212926,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219357,
						"rouge1_diff,none": -9.973904804411875,
						"rouge1_diff_stderr,none": 0.931770222940795,
						"rouge1_max,none": 51.683375699575556,
						"rouge1_max_stderr,none": 0.8848460189809442,
						"rouge2_acc,none": 0.2521419828641371,
						"rouge2_acc_stderr,none": 0.015201522246299969,
						"rouge2_diff,none": -11.726788386174697,
						"rouge2_diff_stderr,none": 1.1297957792834203,
						"rouge2_max,none": 35.57191977539582,
						"rouge2_max_stderr,none": 1.039100559069391,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.319052971320211,
						"rougeL_diff_stderr,none": 0.9485641276203709,
						"rougeL_max,none": 48.85601562983841,
						"rougeL_max_stderr,none": 0.904288267562164
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.559174160339107,
						"bleu_diff_stderr,none": 0.8634360451460346,
						"bleu_max,none": 26.72643529006475,
						"bleu_max_stderr,none": 0.8143973045212926,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219357,
						"rouge1_diff,none": -9.973904804411875,
						"rouge1_diff_stderr,none": 0.931770222940795,
						"rouge1_max,none": 51.683375699575556,
						"rouge1_max_stderr,none": 0.8848460189809442,
						"rouge2_acc,none": 0.2521419828641371,
						"rouge2_acc_stderr,none": 0.015201522246299969,
						"rouge2_diff,none": -11.726788386174697,
						"rouge2_diff_stderr,none": 1.1297957792834203,
						"rouge2_max,none": 35.57191977539582,
						"rouge2_max_stderr,none": 1.039100559069391,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.319052971320211,
						"rougeL_diff_stderr,none": 0.9485641276203709,
						"rougeL_max,none": 48.85601562983841,
						"rougeL_max_stderr,none": 0.904288267562164
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25703794369645044,
						"acc_stderr,none": 0.015298077509485083,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39438565380357976,
						"acc_stderr,none": 0.014050402866406918,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.09547244094488189,
						"exact_match_stderr,none": 0.006520711009195213
					},
					"wic": {
						"acc,none": 0.5893416927899686,
						"acc_stderr,none": 0.019491899937012624,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6344712600387684,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523687109630344,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.503540071971235,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7040252565114443,
						"acc_stderr,none": 0.012829348226339011,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.048346889526540184,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.02121744734950014,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6192727272727273,
						"acc_stderr,none": 0.06832770339193,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127683,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.02210903931061855,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.02001121929807353,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.02074059653648807,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43502008032128514,
						"acc_stderr,none": 0.05228323874212457,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.48032128514056227,
						"acc_stderr,none": 0.010014307727112705,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4815261044176707,
						"acc_stderr,none": 0.01001522976835699,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.378714859437751,
						"acc_stderr,none": 0.009722751990000568,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5244979919678715,
						"acc_stderr,none": 0.010010036112667896,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.01002134544404757,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4963855421686747,
						"acc_stderr,none": 0.010021811000966357,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42891566265060244,
						"acc_stderr,none": 0.009920273121045582,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4927710843373494,
						"acc_stderr,none": 0.010021025361119623,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.009806220246670022,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41004016064257026,
						"acc_stderr,none": 0.009858525713807855,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811681,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453003,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.39558232931726905,
						"acc_stderr,none": 0.009801094347134977,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.00952295446980604,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6257746224655556,
						"acc_stderr,none": 0.06171346989644983,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5843812045003309,
						"acc_stderr,none": 0.012682569054907637,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7769688947716744,
						"acc_stderr,none": 0.010712628906979185,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.01166782538830548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5671740569159497,
						"acc_stderr,none": 0.012750474502985821,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927902,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6611515552614163,
						"acc_stderr,none": 0.01218049075873903,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.01282980472032169,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6790205162144275,
						"acc_stderr,none": 0.012014110213469814,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293375,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750352,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6267372600926538,
						"acc_stderr,none": 0.012446911553527132,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8204090806922903,
						"acc_stderr,none": 0.035584246762786625,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8778494623655914,
						"acc_stderr,none": 0.0067926565019669274,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.048740641331093675,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7612095933263816,
						"acc_stderr,none": 0.013774562360361408,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7680608365019012,
						"acc_stderr,none": 0.026075593860304686,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.0261980577440264,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7936507936507936,
						"acc_stderr,none": 0.018043971660827253,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/1_3-C1-rwkv-20_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6423337091319054,
						"acc_norm,none": 0.6431792559188275,
						"acc_norm_stderr,none": 0.08733540954677337,
						"acc_stderr,none": 0.10605633846300538,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.494375,
						"acc_stderr,none": 0.050878728056632946,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.06315,
						"acc_stderr,none": 0.03881666768923503,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8094029850746268,
						"acc_stderr,none": 0.16446663760516048,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.28677563150074287,
						"acc_norm,none": 0.28677563150074287,
						"acc_norm_stderr,none": 0.12398063401197369,
						"acc_stderr,none": 0.12398063401197369,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3112588499395612,
						"acc_norm,none": 0.3112588499395612,
						"acc_norm_stderr,none": 0.058541543051237814,
						"acc_stderr,none": 0.058541543051237814,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.467445587358378,
						"likelihood_diff_stderr,none": 0.5014990808316656,
						"pct_stereotype,none": 0.6037567084078712,
						"pct_stereotype_stderr,none": 0.07319414171523948
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1220472440944882,
						"exact_match_stderr,none": 0.0072634775762681625
					},
					"glue": {
						"acc,none": 0.7358116960457362,
						"acc_stderr,none": 0.004258104109331259,
						"alias": "glue",
						"f1,none": 0.7073852947454761,
						"f1_stderr,none": 0.00012848373710171084,
						"mcc,none": 0.2694811177512579,
						"mcc_stderr,none": 0.030245598408629608
					},
					"kmmlu": {
						"acc,none": 0.27825584753104243,
						"acc_norm,none": 0.27825584753104243,
						"acc_norm_stderr,none": 0.025580403792291862,
						"acc_stderr,none": 0.025580403792291862,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5215961411971058,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.043040567138276385,
						"alias": "kobest",
						"f1,none": 0.41937880984955084,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7137589753541627,
						"acc_stderr,none": 0.018286984380495194,
						"alias": "lambada",
						"perplexity,none": 3.5993048793430886,
						"perplexity_stderr,none": 0.18445497416891324
					},
					"lambada_cloze": {
						"acc,none": 0.0504560450223171,
						"acc_stderr,none": 0.0072666203486836615,
						"alias": "lambada_cloze",
						"perplexity,none": 344.1803676557841,
						"perplexity_stderr,none": 46.37133693633087
					},
					"lambada_multilingual": {
						"acc,none": 0.5372404424607025,
						"acc_stderr,none": 0.0819835037291604,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.09223250693827,
						"perplexity_stderr,none": 8.15315907301449
					},
					"mmlu": {
						"acc,none": 0.3946731234866828,
						"acc_stderr,none": 0.08172732864869785,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3770456960680127,
						"acc_stderr,none": 0.0833025465313835,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.44834245252655297,
						"acc_stderr,none": 0.07873531811155372,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4241143971400715,
						"acc_stderr,none": 0.07010629218516794,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.33935934031081505,
						"acc_stderr,none": 0.06813732611610773,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.37856635911994324,
						"acc_norm,none": 0.3469537440056363,
						"acc_norm_stderr,none": 0.00010326356053587125,
						"acc_stderr,none": 0.08578365962364212,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.48878571428571427,
						"acc_stderr,none": 0.0531608180258701,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7335597392857874,
						"acc_norm,none": 0.6471557906415375,
						"acc_norm_stderr,none": 0.00969326042500496,
						"acc_stderr,none": 0.1542400179335276,
						"alias": "pythia",
						"bits_per_byte,none": 0.6342174941165662,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.552095677770228,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2605044990978005,
						"perplexity_stderr,none": 0.06330509246371822,
						"word_perplexity,none": 10.493665074111934,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.0614108835655997,
						"acc_stderr,none": 0.04561195848693889,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7080962363981231,
						"acc_stderr,none": 0.09171748368354668,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32460579604328094,
						"acc_stderr,none": 0.0015721043673622636,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3243574051407589,
						"bleu_acc_stderr,none": 0.016387976779647942,
						"bleu_diff,none": -7.4283733093305155,
						"bleu_diff_stderr,none": 0.8539471179702989,
						"bleu_max,none": 26.647563554192672,
						"bleu_max_stderr,none": 0.8099098219963794,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.015846315101394812,
						"rouge1_diff,none": -9.472806607867218,
						"rouge1_diff_stderr,none": 0.9286012313264207,
						"rouge1_max,none": 51.57859500885156,
						"rouge1_max_stderr,none": 0.892985034031209,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.01522589934082685,
						"rouge2_diff,none": -11.655543040139788,
						"rouge2_diff_stderr,none": 1.1195562445681042,
						"rouge2_max,none": 35.2783620759863,
						"rouge2_max_stderr,none": 1.0366655166805636,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -9.923242719170018,
						"rougeL_diff_stderr,none": 0.9411537988243618,
						"rougeL_max,none": 48.68569856965994,
						"rougeL_max_stderr,none": 0.9086945723864407
					},
					"xcopa": {
						"acc,none": 0.6207272727272727,
						"acc_stderr,none": 0.06788280987857083,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4367871485943775,
						"acc_stderr,none": 0.05171860349647895,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.626376270982492,
						"acc_stderr,none": 0.06212376714652831,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8210833895257361,
						"acc_stderr,none": 0.03574035419898364,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6423337091319054,
						"acc_norm,none": 0.6431792559188275,
						"acc_norm_stderr,none": 0.08733540954677337,
						"acc_stderr,none": 0.10605633846300538,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.494375,
						"acc_stderr,none": 0.050878728056632946,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498329,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.015749255189977596,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.44166666666666665,
						"acc_stderr,none": 0.014341167179902796,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4180887372013652,
						"acc_norm,none": 0.4590443686006826,
						"acc_norm_stderr,none": 0.014562291073601226,
						"acc_stderr,none": 0.014413988396996083,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7529461279461279,
						"acc_norm,none": 0.734006734006734,
						"acc_norm_stderr,none": 0.009066789565615694,
						"acc_stderr,none": 0.008850055161459236,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.06315,
						"acc_stderr,none": 0.03881666768923503,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.038,
						"acc_stderr,none": 0.0042763469891703145,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.1465,
						"acc_stderr,none": 0.007908865283657356,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.072,
						"acc_stderr,none": 0.005781410931267408,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.024,
						"acc_stderr,none": 0.0034231358327511327,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.021,
						"acc_stderr,none": 0.003206967776757454,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1085,
						"acc_stderr,none": 0.006956153321665585,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.129,
						"acc_stderr,none": 0.007497173054018491,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154647153,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.084,
						"acc_stderr,none": 0.006204131335071225,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005206073752711497,
						"acc_stderr,none": 0.0014992721829171593,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8094029850746268,
						"acc_stderr,none": 0.16446663760516048,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621224,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689092,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437686,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.01141991306509869,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745927,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.01290913032104209,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.555,
						"acc_stderr,none": 0.01572330188676094,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381795,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357794,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256553,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832024,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666667,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796391,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727073,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936703,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444233,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206494,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145156,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.00757207609155742,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910639,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.434,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621247,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386698,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.0156403203170401,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801108,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753656,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.01028132801274739,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240644,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996664,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.015080663991563098,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.01562562511262065,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.015794475789511472,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938579,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.015743152379585526,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525057,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462123,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298315,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074786,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746847,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.015736792768752023,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323487,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504392,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206488,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.435,
						"acc_stderr,none": 0.0156850572527172,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110865,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400234,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066536,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475287,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.015417317979911076,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559545,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697067,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011858,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727073,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727047,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055238,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361427,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7235474006116208,
						"acc_stderr,none": 0.007822334302689731,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255829,
						"alias": "cb",
						"f1,none": 0.6992337164750958,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.28677563150074287,
						"acc_norm,none": 0.28677563150074287,
						"acc_norm_stderr,none": 0.12398063401197369,
						"acc_stderr,none": 0.12398063401197369,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.060606060606060594,
						"acc_stderr,none": 0.060606060606060594,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5227272727272727,
						"acc_norm,none": 0.5227272727272727,
						"acc_norm_stderr,none": 0.07617047451458002,
						"acc_stderr,none": 0.07617047451458002,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3112588499395612,
						"acc_norm,none": 0.3112588499395612,
						"acc_norm_stderr,none": 0.058541543051237814,
						"acc_stderr,none": 0.058541543051237814,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03662869876642904,
						"acc_stderr,none": 0.03662869876642904,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.0390406778668338,
						"acc_stderr,none": 0.0390406778668338,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.0331592569829487,
						"acc_stderr,none": 0.0331592569829487,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.32061068702290074,
						"acc_norm,none": 0.32061068702290074,
						"acc_norm_stderr,none": 0.040933292298342784,
						"acc_stderr,none": 0.040933292298342784,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3602941176470588,
						"acc_norm,none": 0.3602941176470588,
						"acc_norm_stderr,none": 0.041319197084091215,
						"acc_stderr,none": 0.041319197084091215,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449114,
						"acc_stderr,none": 0.04648144634449114,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3560371517027864,
						"acc_norm,none": 0.3560371517027864,
						"acc_norm_stderr,none": 0.026683950692610883,
						"acc_stderr,none": 0.026683950692610883,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399812,
						"acc_stderr,none": 0.03166009679399812,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.39106145251396646,
						"acc_norm,none": 0.39106145251396646,
						"acc_norm_stderr,none": 0.03657625502786071,
						"acc_stderr,none": 0.03657625502786071,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036427,
						"acc_stderr,none": 0.027985699387036427,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.04808472349429953,
						"acc_stderr,none": 0.04808472349429953,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.0401910747255735,
						"acc_stderr,none": 0.0401910747255735,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.0435054681899906,
						"acc_stderr,none": 0.0435054681899906,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.027065504564389522,
						"acc_stderr,none": 0.027065504564389522,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.03332139944668086,
						"acc_stderr,none": 0.03332139944668086,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.03401052620104089,
						"acc_stderr,none": 0.03401052620104089,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617517,
						"acc_stderr,none": 0.03814280082617517,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.03815152004368298,
						"acc_stderr,none": 0.03815152004368298,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3496932515337423,
						"acc_norm,none": 0.3496932515337423,
						"acc_norm_stderr,none": 0.03746668325470021,
						"acc_stderr,none": 0.03746668325470021,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.034724693044775996,
						"acc_stderr,none": 0.034724693044775996,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135445,
						"acc_stderr,none": 0.031911782267135445,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4411764705882353,
						"acc_norm,none": 0.4411764705882353,
						"acc_norm_stderr,none": 0.032252942323996406,
						"acc_stderr,none": 0.032252942323996406,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501117,
						"acc_stderr,none": 0.03944624162501117,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.32954545454545453,
						"acc_norm,none": 0.32954545454545453,
						"acc_norm_stderr,none": 0.035532299023675745,
						"acc_stderr,none": 0.035532299023675745,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941343,
						"acc_stderr,none": 0.03774033930941343,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3050847457627119,
						"acc_norm,none": 0.3050847457627119,
						"acc_norm_stderr,none": 0.042567999262880046,
						"acc_stderr,none": 0.042567999262880046,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878285,
						"acc_stderr,none": 0.04122066502878285,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.040368457798807794,
						"acc_stderr,none": 0.040368457798807794,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3546511627906977,
						"acc_norm,none": 0.3546511627906977,
						"acc_norm_stderr,none": 0.036584734259385424,
						"acc_stderr,none": 0.036584734259385424,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2822384428223844,
						"acc_norm,none": 0.2822384428223844,
						"acc_norm_stderr,none": 0.02222830014542446,
						"acc_stderr,none": 0.02222830014542446,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4532710280373832,
						"acc_norm,none": 0.4532710280373832,
						"acc_norm_stderr,none": 0.03410948976343204,
						"acc_stderr,none": 0.03410948976343204,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3252032520325203,
						"acc_norm,none": 0.3252032520325203,
						"acc_norm_stderr,none": 0.042411537335732975,
						"acc_stderr,none": 0.042411537335732975,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3476190476190476,
						"acc_norm,none": 0.3476190476190476,
						"acc_norm_stderr,none": 0.032940430891650836,
						"acc_stderr,none": 0.032940430891650836,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03425177889602087,
						"acc_stderr,none": 0.03425177889602087,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.042071607555840204,
						"acc_stderr,none": 0.042071607555840204,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.038061426873099935,
						"acc_stderr,none": 0.038061426873099935,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977447,
						"acc_stderr,none": 0.04654465622977447,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752743,
						"acc_stderr,none": 0.03424737867752743,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846655,
						"acc_stderr,none": 0.030469670650846655,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.34051724137931033,
						"acc_norm,none": 0.34051724137931033,
						"acc_norm_stderr,none": 0.031179222859254785,
						"acc_stderr,none": 0.031179222859254785,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03584022203803134,
						"acc_stderr,none": 0.03584022203803134,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3495575221238938,
						"acc_norm,none": 0.3495575221238938,
						"acc_norm_stderr,none": 0.0317886814832993,
						"acc_stderr,none": 0.0317886814832993,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3393939393939394,
						"acc_norm,none": 0.3393939393939394,
						"acc_norm_stderr,none": 0.03697442205031596,
						"acc_stderr,none": 0.03697442205031596,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3416149068322981,
						"acc_norm,none": 0.3416149068322981,
						"acc_norm_stderr,none": 0.03749284617282493,
						"acc_stderr,none": 0.03749284617282493,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03782614981812041,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2694811177512579,
						"mcc_stderr,none": 0.030245598408629608
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774709,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.467445587358378,
						"likelihood_diff_stderr,none": 0.5014990808316656,
						"pct_stereotype,none": 0.6037567084078712,
						"pct_stereotype_stderr,none": 0.07319414171523948
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.686046511627907,
						"likelihood_diff_stderr,none": 0.08676223377205051,
						"pct_stereotype,none": 0.6416219439475254,
						"pct_stereotype_stderr,none": 0.011713139129932814
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.146978021978022,
						"likelihood_diff_stderr,none": 0.3887664100798249,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.7727272727272725,
						"likelihood_diff_stderr,none": 1.791934120597625,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.880769230769231,
						"likelihood_diff_stderr,none": 0.6320541263523665,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.653515625,
						"likelihood_diff_stderr,none": 0.16132925168206239,
						"pct_stereotype,none": 0.6375,
						"pct_stereotype_stderr,none": 0.02691527109619776
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.6197916666666665,
						"likelihood_diff_stderr,none": 0.2410562555466635,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.828125,
						"likelihood_diff_stderr,none": 0.32102455184354023,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5199311023622046,
						"likelihood_diff_stderr,none": 0.15010581438360973,
						"pct_stereotype,none": 0.547244094488189,
						"pct_stereotype_stderr,none": 0.022106430541228055
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.550675675675676,
						"likelihood_diff_stderr,none": 0.33922809765387124,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.04277662524881439
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.03494623655914,
						"likelihood_diff_stderr,none": 0.4373616691389125,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.223684210526316,
						"likelihood_diff_stderr,none": 0.24880805639958867,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.248024746571258,
						"likelihood_diff_stderr,none": 0.07499881814918674,
						"pct_stereotype,none": 0.5658914728682171,
						"pct_stereotype_stderr,none": 0.012106782103996001
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.297222222222222,
						"likelihood_diff_stderr,none": 0.316693632905748,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.6153846153846154,
						"likelihood_diff_stderr,none": 0.7395724816549774,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.7272727272727275,
						"likelihood_diff_stderr,none": 0.41520936081440196,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.754283489096573,
						"likelihood_diff_stderr,none": 0.12942695241905888,
						"pct_stereotype,none": 0.5669781931464174,
						"pct_stereotype_stderr,none": 0.02769893509024991
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.440711462450593,
						"likelihood_diff_stderr,none": 0.19737997271372398,
						"pct_stereotype,none": 0.4268774703557312,
						"pct_stereotype_stderr,none": 0.031158395621279217
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5868055555555554,
						"likelihood_diff_stderr,none": 0.4646691751633447,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.057003814617008604
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8040760869565218,
						"likelihood_diff_stderr,none": 0.13997168962220394,
						"pct_stereotype,none": 0.4652173913043478,
						"pct_stereotype_stderr,none": 0.023281462893244318
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.5228260869565218,
						"likelihood_diff_stderr,none": 0.27889201431056493,
						"pct_stereotype,none": 0.7739130434782608,
						"pct_stereotype_stderr,none": 0.039177021170945875
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.818681318681319,
						"likelihood_diff_stderr,none": 0.34257041053361204,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355001
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.828125,
						"likelihood_diff_stderr,none": 0.24733145989032265,
						"pct_stereotype,none": 0.7295918367346939,
						"pct_stereotype_stderr,none": 0.03180772269593479
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1220472440944882,
						"exact_match_stderr,none": 0.0072634775762681625
					},
					"glue": {
						"acc,none": 0.7358116960457362,
						"acc_stderr,none": 0.004258104109331259,
						"alias": "glue",
						"f1,none": 0.7073852947454761,
						"f1_stderr,none": 0.00012848373710171084,
						"mcc,none": 0.2694811177512579,
						"mcc_stderr,none": 0.030245598408629608
					},
					"hellaswag": {
						"acc,none": 0.5392352121091416,
						"acc_norm,none": 0.7261501692889862,
						"acc_norm_stderr,none": 0.004450214826707173,
						"acc_stderr,none": 0.004974395131539592,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.27825584753104243,
						"acc_norm,none": 0.27825584753104243,
						"acc_norm_stderr,none": 0.025580403792291862,
						"acc_stderr,none": 0.025580403792291862,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259585,
						"acc_stderr,none": 0.014111099288259585,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231561,
						"acc_stderr,none": 0.014326941797231561,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.01392928659425971,
						"acc_stderr,none": 0.01392928659425971,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729488,
						"acc_stderr,none": 0.014013292702729488,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.017613084291727015,
						"acc_stderr,none": 0.017613084291727015,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612032,
						"acc_stderr,none": 0.014190150117612032,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738863,
						"acc_stderr,none": 0.014806864733738863,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459548,
						"acc_stderr,none": 0.014526080235459548,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.03232801420614266,
						"acc_stderr,none": 0.03232801420614266,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361425,
						"acc_stderr,none": 0.014498627873361425,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.035218036253024915,
						"acc_stderr,none": 0.035218036253024915,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434944,
						"acc_stderr,none": 0.014221154708434944,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087974,
						"acc_stderr,none": 0.014683991951087974,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689503,
						"acc_stderr,none": 0.01382541652689503,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729488,
						"acc_stderr,none": 0.014013292702729488,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809944,
						"acc_stderr,none": 0.013963164754809944,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200492,
						"acc_stderr,none": 0.014282120955200492,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459546,
						"acc_stderr,none": 0.014526080235459546,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.01449862787336143,
						"acc_stderr,none": 0.01449862787336143,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485246,
						"acc_stderr,none": 0.014174516461485246,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881424,
						"acc_stderr,none": 0.013588548437881424,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145165,
						"acc_stderr,none": 0.013979965645145165,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515445,
						"acc_stderr,none": 0.013531522534515445,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.017731561494907167,
						"acc_stderr,none": 0.017731561494907167,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809946,
						"acc_stderr,none": 0.013963164754809946,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01420569610409151,
						"acc_stderr,none": 0.01420569610409151,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490528,
						"acc_stderr,none": 0.014127086556490528,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200482,
						"acc_stderr,none": 0.014282120955200482,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322674,
						"acc_stderr,none": 0.041633319989322674,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.25666666666666665,
						"acc_norm,none": 0.25666666666666665,
						"acc_norm_stderr,none": 0.025260441987310474,
						"acc_stderr,none": 0.025260441987310474,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575018,
						"acc_stderr,none": 0.014442734941575018,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.01423652621529135,
						"acc_stderr,none": 0.01423652621529135,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.03109395714370027,
						"acc_stderr,none": 0.03109395714370027,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656601,
						"acc_stderr,none": 0.013842963108656601,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440467,
						"acc_stderr,none": 0.013946271849440467,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.030897382432918608,
						"acc_stderr,none": 0.030897382432918608,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.347,
						"acc_norm,none": 0.347,
						"acc_norm_stderr,none": 0.01506047203170662,
						"acc_stderr,none": 0.01506047203170662,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5215961411971058,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.043040567138276385,
						"alias": "kobest",
						"f1,none": 0.41937880984955084,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5149572649572649,
						"acc_stderr,none": 0.013342793395105116,
						"alias": " - kobest_boolq",
						"f1,none": 0.3645864000579507,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": " - kobest_copa",
						"f1,none": 0.6176691061585379,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.432,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.022175109265613172,
						"acc_stderr,none": 0.022175109265613155,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4267658040320321,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5188916876574308,
						"acc_stderr,none": 0.025108004284191587,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3943965689367378,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7137589753541627,
						"acc_stderr,none": 0.018286984380495194,
						"alias": "lambada",
						"perplexity,none": 3.5993048793430886,
						"perplexity_stderr,none": 0.18445497416891324
					},
					"lambada_cloze": {
						"acc,none": 0.0504560450223171,
						"acc_stderr,none": 0.0072666203486836615,
						"alias": "lambada_cloze",
						"perplexity,none": 344.1803676557841,
						"perplexity_stderr,none": 46.37133693633087
					},
					"lambada_multilingual": {
						"acc,none": 0.5372404424607025,
						"acc_stderr,none": 0.0819835037291604,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.09223250693827,
						"perplexity_stderr,none": 8.15315907301449
					},
					"lambada_openai": {
						"acc,none": 0.7488841451581603,
						"acc_stderr,none": 0.006041662455556342,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2605044990978005,
						"perplexity_stderr,none": 0.06330509246371822
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.037259848631864934,
						"acc_stderr,none": 0.0026386826302632065,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 254.01729689234708,
						"perplexity_stderr,none": 7.754134280996776
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.425383271880458,
						"acc_stderr,none": 0.006887972570117882,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.99419256700746,
						"perplexity_stderr,none": 1.9453421682335441
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7483019600232874,
						"acc_stderr,none": 0.00604631029126968,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2595510066286004,
						"perplexity_stderr,none": 0.06331565177911794
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4566272074519697,
						"acc_stderr,none": 0.0069397193846110145,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.546959423157762,
						"perplexity_stderr,none": 1.3984498856285135
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5445371628177761,
						"acc_stderr,none": 0.006938287769723249,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.55616582235037,
						"perplexity_stderr,none": 0.8066948660179355
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5113526101300213,
						"acc_stderr,none": 0.0069641818508393285,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.10429371554718,
						"perplexity_stderr,none": 1.1742341943926726
					},
					"lambada_standard": {
						"acc,none": 0.6794100523966622,
						"acc_stderr,none": 0.006502090459040096,
						"alias": " - lambada_standard",
						"perplexity,none": 3.9389133823248184,
						"perplexity_stderr,none": 0.07985188830294608
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.06365224141276926,
						"acc_stderr,none": 0.0034012418258709304,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 434.3434384192212,
						"perplexity_stderr,none": 13.244392536435226
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.33142493638676845,
						"exact_match_stderr,get-answer": 0.011876256955227113
					},
					"logiqa": {
						"acc,none": 0.2488479262672811,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.017886249734104392,
						"acc_stderr,none": 0.016957985904525588,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25763358778625955,
						"acc_norm,none": 0.2919847328244275,
						"acc_norm_stderr,none": 0.011471317249048256,
						"acc_stderr,none": 0.011033728806497419,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25829145728643216,
						"acc_norm,none": 0.25996649916247905,
						"acc_norm_stderr,none": 0.008029434758777935,
						"acc_stderr,none": 0.00801257767084925,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4493751323872061,
						"acc_stderr,none": 0.005119451461072863,
						"alias": "mc_taco",
						"f1,none": 0.5407649500927479,
						"f1_stderr,none": 0.005678614688449268
					},
					"medmcqa": {
						"acc,none": 0.3420989720296438,
						"acc_norm,none": 0.3420989720296438,
						"acc_norm_stderr,none": 0.007336081590685145,
						"acc_stderr,none": 0.007336081590685145,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3495679497250589,
						"acc_norm,none": 0.3495679497250589,
						"acc_norm_stderr,none": 0.013369747785840106,
						"acc_stderr,none": 0.013369747785840106,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3946731234866828,
						"acc_stderr,none": 0.08172732864869785,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3925925925925926,
						"acc_stderr,none": 0.0421850621536888,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749193,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.41132075471698115,
						"acc_stderr,none": 0.03028500925900979,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4027777777777778,
						"acc_stderr,none": 0.04101405519842426,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.04960449637488585,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.35319148936170214,
						"acc_stderr,none": 0.031245325202761926,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.41379310344827586,
						"acc_stderr,none": 0.04104269211806231,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30423280423280424,
						"acc_stderr,none": 0.023695415009463084,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4612903225806452,
						"acc_stderr,none": 0.028358634859836935,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.031447125816782426,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5212121212121212,
						"acc_stderr,none": 0.03900828913737302,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.03540294377095368,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5440414507772021,
						"acc_stderr,none": 0.035944137112724345,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.34102564102564104,
						"acc_stderr,none": 0.02403548967633508,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.31851851851851853,
						"acc_stderr,none": 0.028406533090608463,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36554621848739494,
						"acc_stderr,none": 0.03128217706368461,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.48807339449541287,
						"acc_stderr,none": 0.021431223617362233,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25462962962962965,
						"acc_stderr,none": 0.029711275860005333,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5147058823529411,
						"acc_stderr,none": 0.035077938347913236,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.540084388185654,
						"acc_stderr,none": 0.03244246810187913,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4439461883408072,
						"acc_stderr,none": 0.03334625674242728,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.48854961832061067,
						"acc_stderr,none": 0.043841400240780176,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3770456960680127,
						"acc_stderr,none": 0.0833025465313835,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4380165289256198,
						"acc_stderr,none": 0.045291468044357915,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.38650306748466257,
						"acc_stderr,none": 0.03825825548848608,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3482142857142857,
						"acc_stderr,none": 0.04521829902833585,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.049486373240266376,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.03193705726200293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5721583652618135,
						"acc_stderr,none": 0.017692787927803728,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.025992472029306386,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23128491620111732,
						"acc_stderr,none": 0.014102223623152593,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4019607843137255,
						"acc_stderr,none": 0.028074158947600663,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.44834245252655297,
						"acc_stderr,none": 0.07873531811155372,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.49517684887459806,
						"acc_stderr,none": 0.028396770444111298,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.45987654320987653,
						"acc_stderr,none": 0.02773102275353928,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2765957446808511,
						"acc_stderr,none": 0.02668456434046099,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3344198174706649,
						"acc_stderr,none": 0.012049668983214933,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.40441176470588236,
						"acc_stderr,none": 0.02981263070156974,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.40032679738562094,
						"acc_stderr,none": 0.01982184368827177,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3346938775510204,
						"acc_stderr,none": 0.030209235226242307,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4241143971400715,
						"acc_stderr,none": 0.07010629218516794,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5373134328358209,
						"acc_stderr,none": 0.03525675167467974,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.33935934031081505,
						"acc_stderr,none": 0.06813732611610773,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956914,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.037400593820293204,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5964912280701754,
						"acc_stderr,none": 0.03762738699917057,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7826795720835455,
						"acc_stderr,none": 0.004163123057694503,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7827502034174125,
						"acc_stderr,none": 0.00415903366518168,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7181372549019608,
						"acc_stderr,none": 0.02230105263124407,
						"alias": "mrpc",
						"f1,none": 0.8249619482496194,
						"f1_stderr,none": 0.016083263482442177
					},
					"multimedqa": {
						"acc,none": 0.37856635911994324,
						"acc_norm,none": 0.3469537440056363,
						"acc_norm_stderr,none": 0.00010326356053587125,
						"acc_stderr,none": 0.08578365962364212,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.4975247524752475,
						"acc_stderr,none": 0.007181715101778629,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.704194885007953,
						"mrr_stderr,none": 0.010337977385425301,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.4153498871331828,
						"r@2_stderr,none": 0.01656469454977273
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6538750956478291,
						"mrr_stderr,none": 0.01044019345731896,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.016773710557640358
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982786,
						"acc_stderr,none": 0.02059164957122493,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.011127079848413733,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3755,
						"acc_stderr,none": 0.010830906206990815,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4365,
						"acc_stderr,none": 0.011092583003919652,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943093,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.01115079235234166,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5225,
						"acc_stderr,none": 0.011171807357801176,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48878571428571427,
						"acc_stderr,none": 0.0531608180258701,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7769314472252449,
						"acc_norm,none": 0.7883569096844396,
						"acc_norm_stderr,none": 0.0095303512704794,
						"acc_stderr,none": 0.009713057213018534,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24690435525192145,
						"acc_norm,none": 0.29067036720751493,
						"acc_norm_stderr,none": 0.0033173980308300417,
						"acc_stderr,none": 0.003150377482430134,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896142,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7335597392857874,
						"acc_norm,none": 0.6471557906415375,
						"acc_norm_stderr,none": 0.00969326042500496,
						"acc_stderr,none": 0.1542400179335276,
						"alias": "pythia",
						"bits_per_byte,none": 0.6342174941165662,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.552095677770228,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2605044990978005,
						"perplexity_stderr,none": 0.06330509246371822,
						"word_perplexity,none": 10.493665074111934,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.0614108835655997,
						"acc_stderr,none": 0.04561195848693889,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.5583333333333333,
						"acc_norm_stderr,none": 0.04552192400253556,
						"acc_stderr,none": 0.04567549854280213,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978165,
						"acc_stderr,none": 0.0378261498181204,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.3908450704225352,
						"acc_norm_stderr,none": 0.029005007569909827,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7436804353203067,
						"acc_stderr,none": 0.00217138861735811,
						"alias": "qqp",
						"f1,none": 0.7062891477482074,
						"f1_stderr,none": 0.002758499253292074
					},
					"race": {
						"acc,none": 0.33779904306220093,
						"acc_stderr,none": 0.014637734314782854,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2722,
						"em_stderr,none": 0.004451145613179882,
						"f1,none": 0.2818052383452654,
						"f1_stderr,none": 0.0044620497658353344
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.957,
						"acc_norm,none": 0.959,
						"acc_norm_stderr,none": 0.006273624021118794,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170353,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.01120598290257748,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5796760971708488,
						"acc_norm,none": 0.7705688293511946,
						"acc_norm_stderr,none": 0.0029727791837198558,
						"acc_stderr,none": 0.003489919926260053,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7080962363981231,
						"acc_stderr,none": 0.09171748368354668,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6416266025641025,
						"acc_stderr,none": 0.004799306953604282,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9016925103881626,
						"acc_stderr,none": 0.002997450215474849,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5858823529411765,
						"acc_stderr,none": 0.004877398962018403,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32460579604328094,
						"acc_stderr,none": 0.0015721043673622636,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3243574051407589,
						"bleu_acc_stderr,none": 0.016387976779647942,
						"bleu_diff,none": -7.4283733093305155,
						"bleu_diff_stderr,none": 0.8539471179702989,
						"bleu_max,none": 26.647563554192672,
						"bleu_max_stderr,none": 0.8099098219963794,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.015846315101394812,
						"rouge1_diff,none": -9.472806607867218,
						"rouge1_diff_stderr,none": 0.9286012313264207,
						"rouge1_max,none": 51.57859500885156,
						"rouge1_max_stderr,none": 0.892985034031209,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.01522589934082685,
						"rouge2_diff,none": -11.655543040139788,
						"rouge2_diff_stderr,none": 1.1195562445681042,
						"rouge2_max,none": 35.2783620759863,
						"rouge2_max_stderr,none": 1.0366655166805636,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -9.923242719170018,
						"rougeL_diff_stderr,none": 0.9411537988243618,
						"rougeL_max,none": 48.68569856965994,
						"rougeL_max_stderr,none": 0.9086945723864407
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3243574051407589,
						"bleu_acc_stderr,none": 0.016387976779647942,
						"bleu_diff,none": -7.4283733093305155,
						"bleu_diff_stderr,none": 0.8539471179702989,
						"bleu_max,none": 26.647563554192672,
						"bleu_max_stderr,none": 0.8099098219963794,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.015846315101394812,
						"rouge1_diff,none": -9.472806607867218,
						"rouge1_diff_stderr,none": 0.9286012313264207,
						"rouge1_max,none": 51.57859500885156,
						"rouge1_max_stderr,none": 0.892985034031209,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.01522589934082685,
						"rouge2_diff,none": -11.655543040139788,
						"rouge2_diff_stderr,none": 1.1195562445681042,
						"rouge2_max,none": 35.2783620759863,
						"rouge2_max_stderr,none": 1.0366655166805636,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -9.923242719170018,
						"rougeL_diff_stderr,none": 0.9411537988243618,
						"rougeL_max,none": 48.68569856965994,
						"rougeL_max_stderr,none": 0.9086945723864407
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25091799265605874,
						"acc_stderr,none": 0.015176985027707696,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39829359943050313,
						"acc_stderr,none": 0.014054221242542327,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.1220472440944882,
						"exact_match_stderr,none": 0.0072634775762681625
					},
					"wic": {
						"acc,none": 0.5736677115987461,
						"acc_stderr,none": 0.019594518675279032,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6342174941165662,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.552095677770228,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.493665074111934,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7079715864246251,
						"acc_stderr,none": 0.012779198491754023,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.41346153846153844,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.021217447349500148,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6207272727272727,
						"acc_stderr,none": 0.06788280987857083,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.021983962090086337,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.019966103540279462,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.020667032987466104,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4367871485943775,
						"acc_stderr,none": 0.05171860349647895,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088546,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3927710843373494,
						"acc_stderr,none": 0.009788891787583071,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5216867469879518,
						"acc_stderr,none": 0.010012641367065516,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5028112449799197,
						"acc_stderr,none": 0.010021914455122176,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5016064257028112,
						"acc_stderr,none": 0.010022021141102096,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42449799196787147,
						"acc_stderr,none": 0.009907151253284268,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.010020508033762624,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39919678714859436,
						"acc_stderr,none": 0.009816285782535863,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.009852581919032235,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45100401606425705,
						"acc_stderr,none": 0.00997383869127231,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40200803212851405,
						"acc_stderr,none": 0.009827715873484714,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.39759036144578314,
						"acc_stderr,none": 0.009809602996075811,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3477911646586345,
						"acc_stderr,none": 0.00954641176984314,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.626376270982492,
						"acc_stderr,none": 0.06212376714652831,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.012661578894368947,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.01162785634694061,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.01275504628991222,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.012605764077627153,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.01216884022167803,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5400397088021178,
						"acc_stderr,none": 0.01282580237008399,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6757114493712773,
						"acc_stderr,none": 0.012046419229995328,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219837,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823774,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6273990734612839,
						"acc_stderr,none": 0.012442436359907102,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8210833895257361,
						"acc_stderr,none": 0.03574035419898364,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8756989247311828,
						"acc_stderr,none": 0.006843791800722096,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7685088633993743,
						"acc_stderr,none": 0.013627270272100154,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7956349206349206,
						"acc_stderr,none": 0.017979444509028917,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C1-rwkv-20_pth"
	},
	"./rwkv-x-dev/1_3-C1-rwkv-340_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6423337091319054,
						"acc_norm,none": 0.6406426155580609,
						"acc_norm_stderr,none": 0.08814302697947485,
						"acc_stderr,none": 0.10486023206515292,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4840625,
						"acc_stderr,none": 0.05006935801163411,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0764,
						"acc_stderr,none": 0.05522507005896322,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8094029850746268,
						"acc_stderr,none": 0.1646926428942379,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27340267459138184,
						"acc_norm,none": 0.27340267459138184,
						"acc_norm_stderr,none": 0.12136062021428108,
						"acc_stderr,none": 0.12136062021428108,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.295372129165947,
						"acc_norm,none": 0.295372129165947,
						"acc_norm_stderr,none": 0.05059886715142641,
						"acc_stderr,none": 0.05059886715142641,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.522529069767442,
						"likelihood_diff_stderr,none": 0.47859364769147594,
						"pct_stereotype,none": 0.6107632677400118,
						"pct_stereotype_stderr,none": 0.06872407549545181
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.10236220472440945,
						"exact_match_stderr,none": 0.0067261325125403445
					},
					"glue": {
						"acc,none": 0.7373005002382087,
						"acc_stderr,none": 0.004347610963871786,
						"alias": "glue",
						"f1,none": 0.6915895843189218,
						"f1_stderr,none": 0.0001783729661553466,
						"mcc,none": 0.1417535089631069,
						"mcc_stderr,none": 0.03181399358723615
					},
					"kmmlu": {
						"acc,none": 0.2907883338146116,
						"acc_norm,none": 0.2907883338146116,
						"acc_norm_stderr,none": 0.03058386605485869,
						"acc_stderr,none": 0.03058386605485869,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5174303880727911,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.00049329859719439,
						"acc_stderr,none": 0.04506301520954102,
						"alias": "kobest",
						"f1,none": 0.412674289091718,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7140500679215991,
						"acc_stderr,none": 0.017605085946934364,
						"alias": "lambada",
						"perplexity,none": 3.635681602620688,
						"perplexity_stderr,none": 0.19029035463499064
					},
					"lambada_cloze": {
						"acc,none": 0.038812342324859306,
						"acc_stderr,none": 0.004256370495481683,
						"alias": "lambada_cloze",
						"perplexity,none": 465.9259575971975,
						"perplexity_stderr,none": 90.13327932279957
					},
					"lambada_multilingual": {
						"acc,none": 0.5372016301183776,
						"acc_stderr,none": 0.08533120145608283,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.407895824219466,
						"perplexity_stderr,none": 8.516926622702716
					},
					"mmlu": {
						"acc,none": 0.3934624697336562,
						"acc_stderr,none": 0.08219362248241792,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3691817215727949,
						"acc_stderr,none": 0.08247906472012485,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4476987447698745,
						"acc_stderr,none": 0.08220735939276784,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.42996425089372764,
						"acc_stderr,none": 0.06515156424970105,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.340627973358706,
						"acc_stderr,none": 0.0714832190512744,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3801277501774308,
						"acc_norm,none": 0.34675858774560314,
						"acc_norm_stderr,none": 0.00010002717895680177,
						"acc_stderr,none": 0.08866480435737589,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48571428571428565,
						"acc_stderr,none": 0.05159348161594731,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7331581256726234,
						"acc_norm,none": 0.6445504481874059,
						"acc_norm_stderr,none": 0.009841657213670217,
						"acc_stderr,none": 0.15461466601613438,
						"alias": "pythia",
						"bits_per_byte,none": 0.6344254256651548,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523193930439774,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2856293242229415,
						"perplexity_stderr,none": 0.06388020579632159,
						"word_perplexity,none": 10.5017557946628,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.44148936170212766,
						"acc_norm_stderr,none": 0.058380053763474334,
						"acc_stderr,none": 0.04842372839967371,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.706199460916442,
						"acc_stderr,none": 0.07057610572446903,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3193481401332778,
						"acc_stderr,none": 0.001601972604193396,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -7.40610643180493,
						"bleu_diff_stderr,none": 0.8747449309518434,
						"bleu_max,none": 26.437827361371447,
						"bleu_max_stderr,none": 0.8076780035603734,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839672,
						"rouge1_diff,none": -9.611723329840688,
						"rouge1_diff_stderr,none": 0.9554416340882098,
						"rouge1_max,none": 51.45703794169747,
						"rouge1_max_stderr,none": 0.8862402288862491,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -11.56260229570307,
						"rouge2_diff_stderr,none": 1.1507108451845416,
						"rouge2_max,none": 35.272306655035464,
						"rouge2_max_stderr,none": 1.0358834198913611,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.01584631510139481,
						"rougeL_diff,none": -9.970635212767064,
						"rougeL_diff_stderr,none": 0.9713505887964828,
						"rougeL_max,none": 48.544789560203014,
						"rougeL_max_stderr,none": 0.9033654258123315
					},
					"xcopa": {
						"acc,none": 0.6183636363636363,
						"acc_stderr,none": 0.06879776772236333,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.050261342835443056,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6255339630587811,
						"acc_stderr,none": 0.06260157082331846,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.035900324276513525,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6423337091319054,
						"acc_norm,none": 0.6406426155580609,
						"acc_norm_stderr,none": 0.08814302697947485,
						"acc_stderr,none": 0.10486023206515292,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4840625,
						"acc_stderr,none": 0.05006935801163411,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936531,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.438,
						"acc_stderr,none": 0.015697210019694693,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.43666666666666665,
						"acc_stderr,none": 0.014323467067418902,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4206484641638225,
						"acc_norm,none": 0.454778156996587,
						"acc_norm_stderr,none": 0.014551507060836353,
						"acc_stderr,none": 0.014426211252508406,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7516835016835017,
						"acc_norm,none": 0.7323232323232324,
						"acc_norm_stderr,none": 0.009085000147099363,
						"acc_stderr,none": 0.008865199020660961,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0764,
						"acc_stderr,none": 0.05522507005896322,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0435,
						"acc_stderr,none": 0.004562267215000669,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.211,
						"acc_stderr,none": 0.009125850128633718,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0905,
						"acc_stderr,none": 0.006416810947142419,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0325,
						"acc_stderr,none": 0.003966073608738817,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.016,
						"acc_stderr,none": 0.0028064101569415328,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1525,
						"acc_stderr,none": 0.008040783674558802,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0045,
						"acc_stderr,none": 0.0014969954902233147,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.139,
						"acc_stderr,none": 0.007737534149673089,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339254,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.074,
						"acc_stderr,none": 0.005854838987520088,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006507592190889371,
						"acc_stderr,none": 0.0016751403645020638,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8094029850746268,
						"acc_stderr,none": 0.1646926428942379,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662721,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910652,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.0115694793682713,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491136,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696235,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357794,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.01084535023047299,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030084,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031515,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817153,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140921,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785141,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240651,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727068,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.014987482264363935,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087959,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.01386041525752791,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919289,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946099,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178344,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.434,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662761,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117716,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.577,
						"acc_stderr,none": 0.01563058909047635,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377939,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523727,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796401,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.684,
						"acc_stderr,none": 0.014709193056057127,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462124,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.014758652303574881,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577806,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814895,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731979,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.015766025737882158,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766273,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462123,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881421,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448795,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812182,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.015773547629015106,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557416,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992441,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910643,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008205,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.431,
						"acc_stderr,none": 0.015667944488173508,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024963,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122581,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423524,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946087,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411245,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.01126614068463216,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474923,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240646,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318227,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.00521950603441005,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.299,
						"acc_stderr,none": 0.014484778521220456,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7137614678899082,
						"acc_stderr,none": 0.007905569067672594,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8035714285714286,
						"acc_stderr,none": 0.05357142857142858,
						"alias": "cb",
						"f1,none": 0.651360544217687,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27340267459138184,
						"acc_norm,none": 0.27340267459138184,
						"acc_norm_stderr,none": 0.12136062021428108,
						"acc_stderr,none": 0.12136062021428108,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915607,
						"acc_stderr,none": 0.06742861107915607,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.295372129165947,
						"acc_norm,none": 0.295372129165947,
						"acc_norm_stderr,none": 0.05059886715142641,
						"acc_stderr,none": 0.05059886715142641,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.33436532507739936,
						"acc_norm,none": 0.33436532507739936,
						"acc_norm_stderr,none": 0.026290609195557965,
						"acc_stderr,none": 0.026290609195557965,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.031980016601150726,
						"acc_stderr,none": 0.031980016601150726,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3407821229050279,
						"acc_norm,none": 0.3407821229050279,
						"acc_norm_stderr,none": 0.03552572003977931,
						"acc_stderr,none": 0.03552572003977931,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.04722013080771233,
						"acc_stderr,none": 0.04722013080771233,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529027,
						"acc_stderr,none": 0.04651841326529027,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714283,
						"acc_stderr,none": 0.04285714285714283,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27106227106227104,
						"acc_norm,none": 0.27106227106227104,
						"acc_norm_stderr,none": 0.026952266920703325,
						"acc_stderr,none": 0.026952266920703325,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.0327028718148208,
						"acc_stderr,none": 0.0327028718148208,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.28654970760233917,
						"acc_norm,none": 0.28654970760233917,
						"acc_norm_stderr,none": 0.03467826685703826,
						"acc_stderr,none": 0.03467826685703826,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124564,
						"acc_stderr,none": 0.03673404171124564,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.34355828220858897,
						"acc_norm,none": 0.34355828220858897,
						"acc_norm_stderr,none": 0.03731133519673893,
						"acc_stderr,none": 0.03731133519673893,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.031544498882702866,
						"acc_stderr,none": 0.031544498882702866,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933945,
						"acc_stderr,none": 0.028187385293933945,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.034293230802398746,
						"acc_stderr,none": 0.034293230802398746,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.0374976336452705,
						"acc_stderr,none": 0.0374976336452705,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.03941888501263191,
						"acc_stderr,none": 0.03941888501263191,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.04006168083848877,
						"acc_stderr,none": 0.04006168083848877,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.0356654553808481,
						"acc_stderr,none": 0.0356654553808481,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26763990267639903,
						"acc_norm,none": 0.26763990267639903,
						"acc_norm_stderr,none": 0.02186481666367267,
						"acc_stderr,none": 0.02186481666367267,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3691588785046729,
						"acc_norm,none": 0.3691588785046729,
						"acc_norm_stderr,none": 0.03306563404172723,
						"acc_stderr,none": 0.03306563404172723,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208568,
						"acc_stderr,none": 0.04119323030208568,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.04111886635267183,
						"acc_stderr,none": 0.04111886635267183,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962465,
						"acc_stderr,none": 0.03224133248962465,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392582,
						"acc_stderr,none": 0.03443002441392582,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.036951833116502325,
						"acc_stderr,none": 0.036951833116502325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.03382281937517294,
						"acc_stderr,none": 0.03382281937517294,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.34051724137931033,
						"acc_norm,none": 0.34051724137931033,
						"acc_norm_stderr,none": 0.03117922285925479,
						"acc_stderr,none": 0.03117922285925479,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0368105086916155,
						"acc_stderr,none": 0.0368105086916155,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.03696826370174652,
						"acc_stderr,none": 0.03696826370174652,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.036955560385363254,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1417535089631069,
						"mcc_stderr,none": 0.03181399358723615
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.522529069767442,
						"likelihood_diff_stderr,none": 0.47859364769147594,
						"pct_stereotype,none": 0.6107632677400118,
						"pct_stereotype_stderr,none": 0.06872407549545181
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6644305307096006,
						"likelihood_diff_stderr,none": 0.08672330346786733,
						"pct_stereotype,none": 0.6404293381037567,
						"pct_stereotype_stderr,none": 0.011721703372467203
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.197802197802198,
						"likelihood_diff_stderr,none": 0.39548768374018717,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.806818181818182,
						"likelihood_diff_stderr,none": 1.7794090276277978,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.876923076923077,
						"likelihood_diff_stderr,none": 0.6308886788871148,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.663671875,
						"likelihood_diff_stderr,none": 0.16067993905562217,
						"pct_stereotype,none": 0.659375,
						"pct_stereotype_stderr,none": 0.026534392975531503
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5711805555555554,
						"likelihood_diff_stderr,none": 0.23635154458168547,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252336
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.782986111111111,
						"likelihood_diff_stderr,none": 0.3238641168072539,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.47367125984252,
						"likelihood_diff_stderr,none": 0.1497303628816483,
						"pct_stereotype,none": 0.5452755905511811,
						"pct_stereotype_stderr,none": 0.022114553870695327
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5518018018018016,
						"likelihood_diff_stderr,none": 0.3408617060820842,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.04277662524881439
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.958333333333333,
						"likelihood_diff_stderr,none": 0.44072657012606453,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.214473684210526,
						"likelihood_diff_stderr,none": 0.25000230839192805,
						"pct_stereotype,none": 0.6631578947368421,
						"pct_stereotype_stderr,none": 0.034378803407483234
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3799567680381633,
						"likelihood_diff_stderr,none": 0.07768162651730583,
						"pct_stereotype,none": 0.5802027429934407,
						"pct_stereotype_stderr,none": 0.012055151703206035
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.425,
						"likelihood_diff_stderr,none": 0.3203328578607235,
						"pct_stereotype,none": 0.5444444444444444,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.423076923076923,
						"likelihood_diff_stderr,none": 0.6854629584854467,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.840909090909091,
						"likelihood_diff_stderr,none": 0.4406049944998533,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8173676012461057,
						"likelihood_diff_stderr,none": 0.1294370242621292,
						"pct_stereotype,none": 0.5669781931464174,
						"pct_stereotype_stderr,none": 0.027698935090249916
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.632905138339921,
						"likelihood_diff_stderr,none": 0.21630419628035827,
						"pct_stereotype,none": 0.4308300395256917,
						"pct_stereotype_stderr,none": 0.03119418930984328
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5902777777777777,
						"likelihood_diff_stderr,none": 0.47234518674276144,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.057003814617008604
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0565217391304347,
						"likelihood_diff_stderr,none": 0.143172594281366,
						"pct_stereotype,none": 0.5195652173913043,
						"pct_stereotype_stderr,none": 0.02332012708760827
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4456521739130435,
						"likelihood_diff_stderr,none": 0.2927548504252623,
						"pct_stereotype,none": 0.7478260869565218,
						"pct_stereotype_stderr,none": 0.04067222754154717
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.9395604395604398,
						"likelihood_diff_stderr,none": 0.3367122847303755,
						"pct_stereotype,none": 0.7472527472527473,
						"pct_stereotype_stderr,none": 0.04580951853732889
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.923150510204082,
						"likelihood_diff_stderr,none": 0.26141299498272574,
						"pct_stereotype,none": 0.7244897959183674,
						"pct_stereotype_stderr,none": 0.03199393624667902
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.10236220472440945,
						"exact_match_stderr,none": 0.0067261325125403445
					},
					"glue": {
						"acc,none": 0.7373005002382087,
						"acc_stderr,none": 0.004347610963871786,
						"alias": "glue",
						"f1,none": 0.6915895843189218,
						"f1_stderr,none": 0.0001783729661553466,
						"mcc,none": 0.1417535089631069,
						"mcc_stderr,none": 0.03181399358723615
					},
					"hellaswag": {
						"acc,none": 0.5374427404899422,
						"acc_norm,none": 0.724457279426409,
						"acc_norm_stderr,none": 0.004458742356237878,
						"acc_stderr,none": 0.004975770805464642,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2907883338146116,
						"acc_norm,none": 0.2907883338146116,
						"acc_norm_stderr,none": 0.03058386605485869,
						"acc_stderr,none": 0.03058386605485869,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809963,
						"acc_stderr,none": 0.014312087053809963,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008213,
						"acc_stderr,none": 0.014414290540008213,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689501,
						"acc_stderr,none": 0.01382541652689501,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485251,
						"acc_stderr,none": 0.014174516461485251,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26166666666666666,
						"acc_norm,none": 0.26166666666666666,
						"acc_norm_stderr,none": 0.017959201687318422,
						"acc_stderr,none": 0.017959201687318422,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.014606483127342761,
						"acc_stderr,none": 0.014606483127342761,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.357,
						"acc_norm,none": 0.357,
						"acc_norm_stderr,none": 0.01515852172148677,
						"acc_stderr,none": 0.01515852172148677,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.318,
						"acc_norm,none": 0.318,
						"acc_norm_stderr,none": 0.014734079309311901,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.03182868716477582,
						"acc_stderr,none": 0.03182868716477582,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996666,
						"acc_stderr,none": 0.014782913600996666,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2076923076923077,
						"acc_norm,none": 0.2076923076923077,
						"acc_norm_stderr,none": 0.03571595663393523,
						"acc_stderr,none": 0.03571595663393523,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.014696631960792505,
						"acc_stderr,none": 0.014696631960792505,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.332,
						"acc_norm,none": 0.332,
						"acc_norm_stderr,none": 0.01489959724281149,
						"acc_stderr,none": 0.01489959724281149,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.01460648312734276,
						"acc_stderr,none": 0.01460648312734276,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809954,
						"acc_stderr,none": 0.013963164754809954,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517908,
						"acc_stderr,none": 0.014297146862517908,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575025,
						"acc_stderr,none": 0.014442734941575025,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934649,
						"acc_stderr,none": 0.014770821817934649,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542127,
						"acc_stderr,none": 0.04512608598542127,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459546,
						"acc_stderr,none": 0.014526080235459546,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934645,
						"acc_stderr,none": 0.014770821817934645,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291338,
						"acc_stderr,none": 0.014236526215291338,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485261,
						"acc_stderr,none": 0.014174516461485261,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438681,
						"acc_stderr,none": 0.013434451402438681,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2866666666666667,
						"acc_norm,none": 0.2866666666666667,
						"acc_norm_stderr,none": 0.01847657402752119,
						"acc_stderr,none": 0.01847657402752119,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440467,
						"acc_stderr,none": 0.013946271849440467,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.01438551156347734,
						"acc_stderr,none": 0.01438551156347734,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459546,
						"acc_stderr,none": 0.014526080235459546,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.307,
						"acc_norm,none": 0.307,
						"acc_norm_stderr,none": 0.014593284892852628,
						"acc_stderr,none": 0.014593284892852628,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.025366873297069225,
						"acc_stderr,none": 0.025366873297069225,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021357,
						"acc_stderr,none": 0.013912208651021357,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535285,
						"acc_stderr,none": 0.014539683710535285,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031306,
						"acc_stderr,none": 0.014267009061031306,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091496,
						"acc_stderr,none": 0.014205696104091496,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633915,
						"acc_stderr,none": 0.014046255632633915,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.030897382432918605,
						"acc_stderr,none": 0.030897382432918605,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.347,
						"acc_norm,none": 0.347,
						"acc_norm_stderr,none": 0.015060472031706618,
						"acc_stderr,none": 0.015060472031706618,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5174303880727911,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.00049329859719439,
						"acc_stderr,none": 0.04506301520954102,
						"alias": "kobest",
						"f1,none": 0.412674289091718,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5064102564102564,
						"acc_stderr,none": 0.013347670414620429,
						"alias": " - kobest_boolq",
						"f1,none": 0.34497952413042354,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968517,
						"alias": " - kobest_copa",
						"f1,none": 0.6167106145072022,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.416,
						"acc_norm,none": 0.562,
						"acc_norm_stderr,none": 0.02221032636397741,
						"acc_stderr,none": 0.022064943313928876,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.41093892842881596,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.025102898696363056,
						"alias": " - kobest_sentineg",
						"f1,none": 0.39928962793068296,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3310778727445394,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7140500679215991,
						"acc_stderr,none": 0.017605085946934364,
						"alias": "lambada",
						"perplexity,none": 3.635681602620688,
						"perplexity_stderr,none": 0.19029035463499064
					},
					"lambada_cloze": {
						"acc,none": 0.038812342324859306,
						"acc_stderr,none": 0.004256370495481683,
						"alias": "lambada_cloze",
						"perplexity,none": 465.9259575971975,
						"perplexity_stderr,none": 90.13327932279957
					},
					"lambada_multilingual": {
						"acc,none": 0.5372016301183776,
						"acc_stderr,none": 0.08533120145608283,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.407895824219466,
						"perplexity_stderr,none": 8.516926622702716
					},
					"lambada_openai": {
						"acc,none": 0.7477197748884146,
						"acc_stderr,none": 0.006050943684570122,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2856293242229415,
						"perplexity_stderr,none": 0.06388020579632159
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.032214244129633224,
						"acc_stderr,none": 0.002459946480253566,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 288.3595232684572,
						"perplexity_stderr,none": 8.98555997301306
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42829419755482245,
						"acc_stderr,none": 0.006893971254195145,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.83282427210745,
						"perplexity_stderr,none": 2.011202538602497
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.746749466330293,
						"acc_stderr,none": 0.006058634002437434,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2845471742221335,
						"perplexity_stderr,none": 0.06387306613320694
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4541044052008539,
						"acc_stderr,none": 0.006936569231082089,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.893712264173548,
						"perplexity_stderr,none": 1.4274525882393623
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5439549776829031,
						"acc_stderr,none": 0.006939008354532882,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.717504097191252,
						"perplexity_stderr,none": 0.8147073962668391
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5129051038230157,
						"acc_stderr,none": 0.006963657019056759,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.310891313402934,
						"perplexity_stderr,none": 1.1945572032809175
					},
					"lambada_standard": {
						"acc,none": 0.6811566078012808,
						"acc_stderr,none": 0.006492684061449837,
						"alias": " - lambada_standard",
						"perplexity,none": 3.987082304231078,
						"perplexity_stderr,none": 0.08122077327847523
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.04541044052008539,
						"acc_stderr,none": 0.0029006696462560095,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 643.4923919259378,
						"perplexity_stderr,none": 20.02182306120468
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3237913486005089,
						"exact_match_stderr,get-answer": 0.01180551369127738
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2872503840245776,
						"acc_norm_stderr,none": 0.017747701948846593,
						"acc_stderr,none": 0.01681567620647953,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2589058524173028,
						"acc_norm,none": 0.2907124681933842,
						"acc_norm_stderr,none": 0.011456577557813217,
						"acc_stderr,none": 0.01105145686861053,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.2659966499162479,
						"acc_norm_stderr,none": 0.008088867008866091,
						"acc_stderr,none": 0.008029434758777931,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.43804278754501164,
						"acc_stderr,none": 0.005106236115239062,
						"alias": "mc_taco",
						"f1,none": 0.536755718526279,
						"f1_stderr,none": 0.005654468235356496
					},
					"medmcqa": {
						"acc,none": 0.3442505378914655,
						"acc_norm,none": 0.3442505378914655,
						"acc_norm_stderr,none": 0.007347071579681349,
						"acc_stderr,none": 0.007347071579681349,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3519245875883739,
						"acc_norm,none": 0.3519245875883739,
						"acc_norm_stderr,none": 0.013390414535997423,
						"acc_stderr,none": 0.013390414535997423,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3934624697336562,
						"acc_stderr,none": 0.08219362248241792,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036844,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4148148148148148,
						"acc_stderr,none": 0.042561937679014075,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749194,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.02977308271331987,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.04122728707651282,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3583815028901734,
						"acc_stderr,none": 0.036563436533531585,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.37872340425531914,
						"acc_stderr,none": 0.031709956060406545,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.041424397194893596,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04082482904638628,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29894179894179895,
						"acc_stderr,none": 0.023577604791655788,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45483870967741935,
						"acc_stderr,none": 0.028327743091561067,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114482,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5212121212121212,
						"acc_stderr,none": 0.03900828913737302,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4494949494949495,
						"acc_stderr,none": 0.03544132491947969,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5284974093264249,
						"acc_stderr,none": 0.03602573571288443,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.36153846153846153,
						"acc_stderr,none": 0.024359581465397,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.028226446749683515,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.37815126050420167,
						"acc_stderr,none": 0.031499305777849054,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.4935779816513762,
						"acc_stderr,none": 0.021435554820013077,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.02988691054762697,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4803921568627451,
						"acc_stderr,none": 0.03506612560524866,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5443037974683544,
						"acc_stderr,none": 0.03241920684693335,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4125560538116592,
						"acc_stderr,none": 0.03304062175449297,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.043851623256015534,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3691817215727949,
						"acc_stderr,none": 0.08247906472012485,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4214876033057851,
						"acc_stderr,none": 0.04507732278775094,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.04766075165356461,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3803680981595092,
						"acc_stderr,none": 0.03814269893261835,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.049486373240266376,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6196581196581197,
						"acc_stderr,none": 0.031804252043840985,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5721583652618135,
						"acc_stderr,none": 0.017692787927803728,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3554913294797688,
						"acc_stderr,none": 0.025770292082977247,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22346368715083798,
						"acc_stderr,none": 0.01393206863857976,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.42810457516339867,
						"acc_stderr,none": 0.028332397483664274,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4476987447698745,
						"acc_stderr,none": 0.08220735939276784,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.48231511254019294,
						"acc_stderr,none": 0.02838032284907713,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.0277012284685426,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2872340425531915,
						"acc_stderr,none": 0.02699219917306436,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.32920469361147325,
						"acc_stderr,none": 0.012002091666902298,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.43014705882352944,
						"acc_stderr,none": 0.030074971917302875,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3954248366013072,
						"acc_stderr,none": 0.01978046595477752,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4636363636363636,
						"acc_stderr,none": 0.04776449162396197,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.33877551020408164,
						"acc_stderr,none": 0.03029950656215418,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.42996425089372764,
						"acc_stderr,none": 0.06515156424970105,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5422885572139303,
						"acc_stderr,none": 0.035228658640995975,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.340627973358706,
						"acc_stderr,none": 0.0714832190512744,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.03711725190740749,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5847953216374269,
						"acc_stderr,none": 0.03779275945503201,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7862455425369332,
						"acc_stderr,none": 0.004138220819214386,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.790073230268511,
						"acc_stderr,none": 0.004107416439553579,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.02186830575426217,
						"alias": "mrpc",
						"f1,none": 0.8333333333333334,
						"f1_stderr,none": 0.01579674386716256
					},
					"multimedqa": {
						"acc,none": 0.3801277501774308,
						"acc_norm,none": 0.34675858774560314,
						"acc_norm_stderr,none": 0.00010002717895680177,
						"acc_stderr,none": 0.08866480435737589,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.4900990099009901,
						"acc_stderr,none": 0.007180394909281877,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7012791588868836,
						"mrr_stderr,none": 0.010373826211148395,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41309255079006774,
						"r@2_stderr,none": 0.016551480902963107
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6528404831011732,
						"mrr_stderr,none": 0.010406838465560862,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47404063205417607,
						"r@2_stderr,none": 0.016784648326758043
					},
					"openbookqa": {
						"acc,none": 0.292,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.02201748257812768,
						"acc_stderr,none": 0.020354375480530082,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4415,
						"acc_stderr,none": 0.011106329288974688,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3755,
						"acc_stderr,none": 0.010830906206990815,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.01111527213509921,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003715,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5415,
						"acc_stderr,none": 0.01114454913793035,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.011182934722804561,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48571428571428565,
						"acc_stderr,none": 0.05159348161594731,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7742110990206746,
						"acc_norm,none": 0.7850924918389554,
						"acc_norm_stderr,none": 0.009583665082653302,
						"acc_stderr,none": 0.00975498067091732,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24460930828351837,
						"acc_norm,none": 0.28944278394534584,
						"acc_norm_stderr,none": 0.0033132487422531795,
						"acc_stderr,none": 0.0031404758315583575,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177528,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7331581256726234,
						"acc_norm,none": 0.6445504481874059,
						"acc_norm_stderr,none": 0.009841657213670217,
						"acc_stderr,none": 0.15461466601613438,
						"alias": "pythia",
						"bits_per_byte,none": 0.6344254256651548,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523193930439774,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2856293242229415,
						"perplexity_stderr,none": 0.06388020579632159,
						"word_perplexity,none": 10.5017557946628,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.44148936170212766,
						"acc_norm_stderr,none": 0.058380053763474334,
						"acc_stderr,none": 0.04842372839967371,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978165,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3873239436619718,
						"acc_norm,none": 0.3873239436619718,
						"acc_norm_stderr,none": 0.02895738957595096,
						"acc_stderr,none": 0.02895738957595096,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7429631461785803,
						"acc_stderr,none": 0.0021733758376830826,
						"alias": "qqp",
						"f1,none": 0.690419447092469,
						"f1_stderr,none": 0.0028769735736541074
					},
					"race": {
						"acc,none": 0.3339712918660287,
						"acc_stderr,none": 0.014596569299709724,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2715,
						"em_stderr,none": 0.004447555854096394,
						"f1,none": 0.28129523834586145,
						"f1_stderr,none": 0.004458441770238439
					},
					"rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.02785041039263069,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.96,
						"acc_norm,none": 0.955,
						"acc_norm_stderr,none": 0.006558812241406088,
						"acc_stderr,none": 0.006199874066337061,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6859205776173285,
						"acc_stderr,none": 0.027938437681209072,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.01120598290257748,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5774767569729081,
						"acc_norm,none": 0.7685694291712486,
						"acc_norm_stderr,none": 0.0029818283419185123,
						"acc_stderr,none": 0.00349239436516661,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.706199460916442,
						"acc_stderr,none": 0.07057610572446903,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6480368589743589,
						"acc_stderr,none": 0.0047798900962643725,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8939900679031114,
						"acc_stderr,none": 0.0030993389170034884,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5814705882352941,
						"acc_stderr,none": 0.004884814451137176,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3193481401332778,
						"acc_stderr,none": 0.001601972604193396,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -7.40610643180493,
						"bleu_diff_stderr,none": 0.8747449309518434,
						"bleu_max,none": 26.437827361371447,
						"bleu_max_stderr,none": 0.8076780035603734,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839672,
						"rouge1_diff,none": -9.611723329840688,
						"rouge1_diff_stderr,none": 0.9554416340882098,
						"rouge1_max,none": 51.45703794169747,
						"rouge1_max_stderr,none": 0.8862402288862491,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -11.56260229570307,
						"rouge2_diff_stderr,none": 1.1507108451845416,
						"rouge2_max,none": 35.272306655035464,
						"rouge2_max_stderr,none": 1.0358834198913611,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.01584631510139481,
						"rougeL_diff,none": -9.970635212767064,
						"rougeL_diff_stderr,none": 0.9713505887964828,
						"rougeL_max,none": 48.544789560203014,
						"rougeL_max_stderr,none": 0.9033654258123315
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -7.40610643180493,
						"bleu_diff_stderr,none": 0.8747449309518434,
						"bleu_max,none": 26.437827361371447,
						"bleu_max_stderr,none": 0.8076780035603734,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839672,
						"rouge1_diff,none": -9.611723329840688,
						"rouge1_diff_stderr,none": 0.9554416340882098,
						"rouge1_max,none": 51.45703794169747,
						"rouge1_max_stderr,none": 0.8862402288862491,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -11.56260229570307,
						"rouge2_diff_stderr,none": 1.1507108451845416,
						"rouge2_max,none": 35.272306655035464,
						"rouge2_max_stderr,none": 1.0358834198913611,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.01584631510139481,
						"rougeL_diff,none": -9.970635212767064,
						"rougeL_diff_stderr,none": 0.9713505887964828,
						"rougeL_max,none": 48.544789560203014,
						"rougeL_max_stderr,none": 0.9033654258123315
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24479804161566707,
						"acc_stderr,none": 0.015051869486715004,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3938982386508885,
						"acc_stderr,none": 0.014038717056980054,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.10236220472440945,
						"exact_match_stderr,none": 0.0067261325125403445
					},
					"wic": {
						"acc,none": 0.5799373040752351,
						"acc_stderr,none": 0.01955590253723442,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6344254256651548,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5523193930439774,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.5017557946628,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7032359905288083,
						"acc_stderr,none": 0.012839239695202028,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.048346889526540184,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070851007,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6183636363636363,
						"acc_stderr,none": 0.06879776772236333,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127683,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.02043534209189613,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587578,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928866,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.020776701920308997,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.050261342835443056,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667057,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.01001042775321067,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4843373493975904,
						"acc_stderr,none": 0.010017154458106753,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.009765326832218988,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5216867469879518,
						"acc_stderr,none": 0.010012641367065514,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5028112449799197,
						"acc_stderr,none": 0.010021914455122176,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4967871485943775,
						"acc_stderr,none": 0.010021865961119555,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42168674698795183,
						"acc_stderr,none": 0.009898379493335451,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.485140562248996,
						"acc_stderr,none": 0.01001764608425538,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.009806220246670024,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40281124497991966,
						"acc_stderr,none": 0.009830919849814063,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4530120481927711,
						"acc_stderr,none": 0.00997771990435373,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40642570281124496,
						"acc_stderr,none": 0.009844999034464197,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480236,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3497991967871486,
						"acc_stderr,none": 0.009559181474778284,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6255339630587811,
						"acc_stderr,none": 0.06260157082331846,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750355,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7114493712772998,
						"acc_stderr,none": 0.011659892295188158,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5684976836532097,
						"acc_stderr,none": 0.01274581004609841,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907709,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6637988087359364,
						"acc_stderr,none": 0.012157083081239748,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5334215751158173,
						"acc_stderr,none": 0.01283834793473168,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6770350761085374,
						"acc_stderr,none": 0.012033578346967671,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5473196558570483,
						"acc_stderr,none": 0.012809372866181957,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6273990734612839,
						"acc_stderr,none": 0.012442436359907102,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.035900324276513525,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8748387096774194,
						"acc_stderr,none": 0.006864058081211033,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7559958289885297,
						"acc_stderr,none": 0.013876360379829226,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.025771203207084713,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7916666666666666,
						"acc_stderr,none": 0.01810783666315205,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C1-rwkv-340_pth"
	},
	"./rwkv-x-dev/1_3-C1-rwkv-390_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6406426155580609,
						"acc_norm,none": 0.6426155580608793,
						"acc_norm_stderr,none": 0.08906148443002737,
						"acc_stderr,none": 0.10646302490175276,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.488125,
						"acc_stderr,none": 0.04960925793958703,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.07005,
						"acc_stderr,none": 0.05237911946737096,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8076865671641792,
						"acc_stderr,none": 0.1659005530856495,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27637444279346207,
						"acc_norm,none": 0.27637444279346207,
						"acc_norm_stderr,none": 0.12130442877275041,
						"acc_stderr,none": 0.12130442877275041,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2930409255741669,
						"acc_norm,none": 0.2930409255741669,
						"acc_norm_stderr,none": 0.050906020281709326,
						"acc_stderr,none": 0.050906020281709326,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.53166927549195,
						"likelihood_diff_stderr,none": 0.4813103227405036,
						"pct_stereotype,none": 0.6092725104353012,
						"pct_stereotype_stderr,none": 0.06886953129381028
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.09744094488188976,
						"exact_match_stderr,none": 0.006580419724685966
					},
					"glue": {
						"acc,none": 0.733906026679371,
						"acc_stderr,none": 0.004181084270866984,
						"alias": "glue",
						"f1,none": 0.6836615927918375,
						"f1_stderr,none": 0.00019820154424976516,
						"mcc,none": 0.18552761164714818,
						"mcc_stderr,none": 0.03263383144957462
					},
					"kmmlu": {
						"acc,none": 0.2904418134565407,
						"acc_norm,none": 0.2904418134565407,
						"acc_norm_stderr,none": 0.030307895154149046,
						"acc_stderr,none": 0.030307895154149046,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5150186362639771,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.04483636067141316,
						"alias": "kobest",
						"f1,none": 0.41250565165853087,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.01783916166560649,
						"alias": "lambada",
						"perplexity,none": 3.6897960827608927,
						"perplexity_stderr,none": 0.2036544107354847
					},
					"lambada_cloze": {
						"acc,none": 0.03415486124587619,
						"acc_stderr,none": 0.0037119704020330112,
						"alias": "lambada_cloze",
						"perplexity,none": 516.967698388973,
						"perplexity_stderr,none": 105.66634817084605
					},
					"lambada_multilingual": {
						"acc,none": 0.5350281389481856,
						"acc_stderr,none": 0.08138382266812279,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.70331850556869,
						"perplexity_stderr,none": 8.455379183060192
					},
					"mmlu": {
						"acc,none": 0.39032901296111666,
						"acc_stderr,none": 0.08464436301362857,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3640807651434644,
						"acc_stderr,none": 0.08496979813396047,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4431927904731252,
						"acc_stderr,none": 0.08949673726168551,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.42671433214169646,
						"acc_stderr,none": 0.0709954903179147,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3418966064065969,
						"acc_stderr,none": 0.06782984644227069,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.37885024840312276,
						"acc_norm,none": 0.3453393240464078,
						"acc_norm_stderr,none": 0.00010225001159147576,
						"acc_stderr,none": 0.08990379276638243,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4883571428571429,
						"acc_stderr,none": 0.050400093125208184,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7311814892579026,
						"acc_norm,none": 0.6465023477418785,
						"acc_norm_stderr,none": 0.010002717577357463,
						"acc_stderr,none": 0.15560356723689256,
						"alias": "pythia",
						"bits_per_byte,none": 0.6345998198671968,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5525070500726705,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.310458832487118,
						"perplexity_stderr,none": 0.06455342172655767,
						"word_perplexity,none": 10.508546370052967,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.0597660080799269,
						"acc_stderr,none": 0.050531397619411646,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7054673721340388,
						"acc_stderr,none": 0.07219777188751882,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3233447259750329,
						"acc_stderr,none": 0.001482427995604895,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.5282040758627256,
						"bleu_diff_stderr,none": 0.8642504936435088,
						"bleu_max,none": 26.477625970727555,
						"bleu_max_stderr,none": 0.8046493977366398,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.0157021070906279,
						"rouge1_diff,none": -9.61504869525477,
						"rouge1_diff_stderr,none": 0.9513326849526342,
						"rouge1_max,none": 51.72074865390213,
						"rouge1_max_stderr,none": 0.8841677319956579,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283361,
						"rouge2_diff,none": -11.761825712496476,
						"rouge2_diff_stderr,none": 1.146784137451743,
						"rouge2_max,none": 35.375626448829145,
						"rouge2_max_stderr,none": 1.0301814558490707,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.949937602219535,
						"rougeL_diff_stderr,none": 0.9687235296572093,
						"rougeL_max,none": 48.82845988968157,
						"rougeL_max_stderr,none": 0.9011067554350052
					},
					"xcopa": {
						"acc,none": 0.6187272727272727,
						"acc_stderr,none": 0.06898295782972137,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4359571619812584,
						"acc_stderr,none": 0.050701097013861586,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6256542927621683,
						"acc_stderr,none": 0.06133943754874467,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8165879973027647,
						"acc_stderr,none": 0.03640734210263105,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6406426155580609,
						"acc_norm,none": 0.6426155580608793,
						"acc_norm_stderr,none": 0.08906148443002737,
						"acc_stderr,none": 0.10646302490175276,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.488125,
						"acc_stderr,none": 0.04960925793958703,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921665,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.01433543118137595,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41552901023890787,
						"acc_norm,none": 0.454778156996587,
						"acc_norm_stderr,none": 0.014551507060836353,
						"acc_stderr,none": 0.01440136664121639,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7516835016835017,
						"acc_norm,none": 0.7352693602693603,
						"acc_norm_stderr,none": 0.009053021086173967,
						"acc_stderr,none": 0.008865199020660961,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.07005,
						"acc_stderr,none": 0.05237911946737096,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0385,
						"acc_stderr,none": 0.004303270159661543,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.205,
						"acc_stderr,none": 0.00902930031243103,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.08,
						"acc_stderr,none": 0.0060678174992828396,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.028,
						"acc_stderr,none": 0.003689824847293216,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.013,
						"acc_stderr,none": 0.002533517190523323,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1345,
						"acc_stderr,none": 0.007631119969964948,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.001320888857431576,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.124,
						"acc_stderr,none": 0.007371510671822562,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521609,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0725,
						"acc_stderr,none": 0.005799887442629755,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005206073752711497,
						"acc_stderr,none": 0.0014992721829171585,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8076865671641792,
						"acc_stderr,none": 0.1659005530856495,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024982,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910652,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727191,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.00872852720607479,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595287,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.01573017604600907,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378227,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030084,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487924,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.0068954729748978965,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.0068954729748978896,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139723,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074792,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698459,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.014746404865473484,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919289,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.00431945108291064,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.015676630912181334,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695803,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386696,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.015667944488173508,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436972,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704156,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087966,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525045,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.317,
						"acc_stderr,none": 0.014721675438880226,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.577,
						"acc_stderr,none": 0.015630589090476342,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.015817274929209008,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.01188449583454167,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524315,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881421,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491116,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298324,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812182,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.015786868759359005,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280309,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890138,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.00442940398017835,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.01453968371053527,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.01562059547530132,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024966,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.595,
						"acc_stderr,none": 0.015531136990453042,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378227,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936705,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.678,
						"acc_stderr,none": 0.014782913600996669,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799447,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792957,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523725,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727075,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698462,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055238,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.296,
						"acc_stderr,none": 0.014442734941575027,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.710091743119266,
						"acc_stderr,none": 0.007935605384295892,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8214285714285714,
						"acc_stderr,none": 0.05164277182008721,
						"alias": "cb",
						"f1,none": 0.663220551378446,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27637444279346207,
						"acc_norm,none": 0.27637444279346207,
						"acc_norm_stderr,none": 0.12130442877275041,
						"acc_stderr,none": 0.12130442877275041,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2930409255741669,
						"acc_norm,none": 0.2930409255741669,
						"acc_norm_stderr,none": 0.050906020281709326,
						"acc_stderr,none": 0.050906020281709326,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3875,
						"acc_norm,none": 0.3875,
						"acc_norm_stderr,none": 0.03863583812241406,
						"acc_stderr,none": 0.03863583812241406,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3492822966507177,
						"acc_norm,none": 0.3492822966507177,
						"acc_norm_stderr,none": 0.03305620024300093,
						"acc_stderr,none": 0.03305620024300093,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847837,
						"acc_stderr,none": 0.03915345408847837,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.040263772107873096,
						"acc_stderr,none": 0.040263772107873096,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3281733746130031,
						"acc_norm,none": 0.3281733746130031,
						"acc_norm_stderr,none": 0.026166904017550823,
						"acc_stderr,none": 0.026166904017550823,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.031980016601150726,
						"acc_stderr,none": 0.031980016601150726,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.35195530726256985,
						"acc_norm,none": 0.35195530726256985,
						"acc_norm_stderr,none": 0.03579614323524846,
						"acc_stderr,none": 0.03579614323524846,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.04722013080771233,
						"acc_stderr,none": 0.04722013080771233,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529027,
						"acc_stderr,none": 0.04651841326529027,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2673992673992674,
						"acc_norm,none": 0.2673992673992674,
						"acc_norm_stderr,none": 0.026836713439088864,
						"acc_stderr,none": 0.026836713439088864,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.03270287181482081,
						"acc_stderr,none": 0.03270287181482081,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0373874230421581,
						"acc_stderr,none": 0.0373874230421581,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.03652215878407506,
						"acc_stderr,none": 0.03652215878407506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935573,
						"acc_stderr,none": 0.03714908409935573,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.034078261673374376,
						"acc_stderr,none": 0.034078261673374376,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.26262626262626265,
						"acc_norm,none": 0.26262626262626265,
						"acc_norm_stderr,none": 0.031353050095330855,
						"acc_stderr,none": 0.031353050095330855,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23043478260869565,
						"acc_norm,none": 0.23043478260869565,
						"acc_norm_stderr,none": 0.027827807522276156,
						"acc_stderr,none": 0.027827807522276156,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.034293230802398746,
						"acc_stderr,none": 0.034293230802398746,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698769,
						"acc_stderr,none": 0.03724517629698769,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600103,
						"acc_stderr,none": 0.03873144730600103,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.04073524322147126,
						"acc_stderr,none": 0.04073524322147126,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.035490439822271735,
						"acc_stderr,none": 0.035490439822271735,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745193,
						"acc_stderr,none": 0.021801329069745193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.032977154614516745,
						"acc_stderr,none": 0.032977154614516745,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.04111886635267183,
						"acc_stderr,none": 0.04111886635267183,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962465,
						"acc_stderr,none": 0.03224133248962465,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.034251778896020865,
						"acc_stderr,none": 0.034251778896020865,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.030879845620960845,
						"acc_stderr,none": 0.030879845620960845,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3160919540229885,
						"acc_norm,none": 0.3160919540229885,
						"acc_norm_stderr,none": 0.035349438976908586,
						"acc_stderr,none": 0.035349438976908586,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.03646204963253812,
						"acc_stderr,none": 0.03646204963253812,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002355,
						"acc_stderr,none": 0.03273943999002355,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3167701863354037,
						"acc_norm,none": 0.3167701863354037,
						"acc_norm_stderr,none": 0.03677863131157453,
						"acc_stderr,none": 0.03677863131157453,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.03714454174077367,
						"acc_stderr,none": 0.03714454174077367,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.18552761164714818,
						"mcc_stderr,none": 0.03263383144957462
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.53166927549195,
						"likelihood_diff_stderr,none": 0.4813103227405036,
						"pct_stereotype,none": 0.6092725104353012,
						"pct_stereotype_stderr,none": 0.06886953129381028
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6713625521765056,
						"likelihood_diff_stderr,none": 0.08683282353127537,
						"pct_stereotype,none": 0.6410256410256411,
						"pct_stereotype_stderr,none": 0.011717431086755267
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.195054945054945,
						"likelihood_diff_stderr,none": 0.3956957481690713,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831278
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.840909090909091,
						"likelihood_diff_stderr,none": 1.7831352391200552,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.869230769230769,
						"likelihood_diff_stderr,none": 0.6324818448174372,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.660546875,
						"likelihood_diff_stderr,none": 0.15994701590438323,
						"pct_stereotype,none": 0.646875,
						"pct_stereotype_stderr,none": 0.026759566559073203
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5729166666666665,
						"likelihood_diff_stderr,none": 0.238825196387172,
						"pct_stereotype,none": 0.5601851851851852,
						"pct_stereotype_stderr,none": 0.03385177976044811
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.795138888888889,
						"likelihood_diff_stderr,none": 0.3248376862681021,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4906496062992125,
						"likelihood_diff_stderr,none": 0.15021309463038085,
						"pct_stereotype,none": 0.5551181102362205,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.56981981981982,
						"likelihood_diff_stderr,none": 0.34485989824596064,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.033602150537634,
						"likelihood_diff_stderr,none": 0.44113165682010524,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.201315789473684,
						"likelihood_diff_stderr,none": 0.2492130985844454,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.034104864353344894
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.391025641025641,
						"likelihood_diff_stderr,none": 0.07807127507043073,
						"pct_stereotype,none": 0.5760286225402504,
						"pct_stereotype_stderr,none": 0.012071279168361647
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.459722222222222,
						"likelihood_diff_stderr,none": 0.3316544081553618,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.5,
						"likelihood_diff_stderr,none": 0.7076731463403725,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.856060606060606,
						"likelihood_diff_stderr,none": 0.43946024738808853,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8341121495327104,
						"likelihood_diff_stderr,none": 0.12838026759246415,
						"pct_stereotype,none": 0.573208722741433,
						"pct_stereotype_stderr,none": 0.027649620415261093
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.6946640316205532,
						"likelihood_diff_stderr,none": 0.21833377142161928,
						"pct_stereotype,none": 0.43478260869565216,
						"pct_stereotype_stderr,none": 0.031227956788816427
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5555555555555554,
						"likelihood_diff_stderr,none": 0.46664000128750194,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.027173913043478,
						"likelihood_diff_stderr,none": 0.14271709632897536,
						"pct_stereotype,none": 0.5152173913043478,
						"pct_stereotype_stderr,none": 0.023327190181139237
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4706521739130434,
						"likelihood_diff_stderr,none": 0.29675065568015574,
						"pct_stereotype,none": 0.7391304347826086,
						"pct_stereotype_stderr,none": 0.041126317518561634
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.9175824175824174,
						"likelihood_diff_stderr,none": 0.34109674939215345,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.949298469387755,
						"likelihood_diff_stderr,none": 0.26276913938278834,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.03252156607969809
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.09744094488188976,
						"exact_match_stderr,none": 0.006580419724685966
					},
					"glue": {
						"acc,none": 0.733906026679371,
						"acc_stderr,none": 0.004181084270866984,
						"alias": "glue",
						"f1,none": 0.6836615927918375,
						"f1_stderr,none": 0.00019820154424976516,
						"mcc,none": 0.18552761164714818,
						"mcc_stderr,none": 0.03263383144957462
					},
					"hellaswag": {
						"acc,none": 0.5373431587333201,
						"acc_norm,none": 0.7252539334793866,
						"acc_norm_stderr,none": 0.004454739415705044,
						"acc_stderr,none": 0.004975845335086617,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2904418134565407,
						"acc_norm,none": 0.2904418134565407,
						"acc_norm_stderr,none": 0.030307895154149046,
						"acc_stderr,none": 0.030307895154149046,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296184,
						"acc_stderr,none": 0.014341711358296184,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.0144568322948011,
						"acc_stderr,none": 0.0144568322948011,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651154,
						"acc_stderr,none": 0.013736254390651154,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25833333333333336,
						"acc_norm,none": 0.25833333333333336,
						"acc_norm_stderr,none": 0.017884680783142228,
						"acc_stderr,none": 0.017884680783142228,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632902,
						"acc_stderr,none": 0.014632638658632902,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.015090650341444233,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934638,
						"acc_stderr,none": 0.014770821817934638,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.031652557907861936,
						"acc_stderr,none": 0.031652557907861936,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.014758652303574876,
						"acc_stderr,none": 0.014758652303574876,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2230769230769231,
						"acc_norm,none": 0.2230769230769231,
						"acc_norm_stderr,none": 0.036654008682010436,
						"acc_stderr,none": 0.036654008682010436,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.01456664639466438,
						"acc_stderr,none": 0.01456664639466438,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.331,
						"acc_norm,none": 0.331,
						"acc_norm_stderr,none": 0.014888272588203928,
						"acc_stderr,none": 0.014888272588203928,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206488,
						"acc_stderr,none": 0.014619600977206488,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514516,
						"acc_stderr,none": 0.01397996564514516,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905689,
						"acc_stderr,none": 0.014356395999905689,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809961,
						"acc_stderr,none": 0.014312087053809961,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.323,
						"acc_norm,none": 0.323,
						"acc_norm_stderr,none": 0.014794927843348633,
						"acc_stderr,none": 0.014794927843348633,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134715,
						"acc_stderr,none": 0.014470846741134715,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.014758652303574883,
						"acc_stderr,none": 0.014758652303574883,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377933,
						"acc_stderr,none": 0.014370995982377933,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259592,
						"acc_stderr,none": 0.014111099288259592,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.01356964019917744,
						"acc_stderr,none": 0.01356964019917744,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2833333333333333,
						"acc_norm,none": 0.2833333333333333,
						"acc_norm_stderr,none": 0.018411705808458512,
						"acc_stderr,none": 0.018411705808458512,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656604,
						"acc_stderr,none": 0.013842963108656604,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231558,
						"acc_stderr,none": 0.014326941797231558,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.01455320568795044,
						"acc_stderr,none": 0.01455320568795044,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.014606483127342758,
						"acc_stderr,none": 0.014606483127342758,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.25666666666666665,
						"acc_norm,none": 0.25666666666666665,
						"acc_norm_stderr,none": 0.025260441987310478,
						"acc_stderr,none": 0.025260441987310478,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259722,
						"acc_stderr,none": 0.013929286594259722,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206486,
						"acc_stderr,none": 0.014619600977206486,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809963,
						"acc_stderr,none": 0.014312087053809963,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073047,
						"acc_stderr,none": 0.030275120389073047,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633918,
						"acc_stderr,none": 0.014046255632633918,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633916,
						"acc_stderr,none": 0.014046255632633916,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.031093957143700262,
						"acc_stderr,none": 0.031093957143700262,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.355,
						"acc_norm,none": 0.355,
						"acc_norm_stderr,none": 0.015139491543780529,
						"acc_stderr,none": 0.015139491543780529,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5150186362639771,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.04483636067141316,
						"alias": "kobest",
						"f1,none": 0.41250565165853087,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5064102564102564,
						"acc_stderr,none": 0.013347670414620429,
						"alias": " - kobest_boolq",
						"f1,none": 0.34497952413042354,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308495,
						"alias": " - kobest_copa",
						"f1,none": 0.6096411608810268,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.406,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.022175109265613172,
						"acc_stderr,none": 0.021983962090086333,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4016092063325507,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.025102898696363056,
						"alias": " - kobest_sentineg",
						"f1,none": 0.421478095471837,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.48968253968253966,
						"acc_stderr,none": 0.01408849682055904,
						"alias": " - kobest_wic",
						"f1,none": 0.33278925189351044,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.01783916166560649,
						"alias": "lambada",
						"perplexity,none": 3.6897960827608927,
						"perplexity_stderr,none": 0.2036544107354847
					},
					"lambada_cloze": {
						"acc,none": 0.03415486124587619,
						"acc_stderr,none": 0.0037119704020330112,
						"alias": "lambada_cloze",
						"perplexity,none": 516.967698388973,
						"perplexity_stderr,none": 105.66634817084605
					},
					"lambada_multilingual": {
						"acc,none": 0.5350281389481856,
						"acc_stderr,none": 0.08138382266812279,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.70331850556869,
						"perplexity_stderr,none": 8.455379183060192
					},
					"lambada_openai": {
						"acc,none": 0.7440326023675529,
						"acc_stderr,none": 0.006079955244951849,
						"alias": " - lambada_openai",
						"perplexity,none": 3.310458832487118,
						"perplexity_stderr,none": 0.06455342172655767
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.028721133320395886,
						"acc_stderr,none": 0.002326938091450223,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 308.5802147387183,
						"perplexity_stderr,none": 9.670646766329824
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4248010867455851,
						"acc_stderr,none": 0.006886743547830465,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.24982961021297,
						"perplexity_stderr,none": 2.028734842086422
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7440326023675529,
						"acc_stderr,none": 0.006079955244951853,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3107852312820834,
						"perplexity_stderr,none": 0.06464488446377994
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45294003493110807,
						"acc_stderr,none": 0.006935054751870186,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.315822272696305,
						"perplexity_stderr,none": 1.4412680338485646
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5441490393945274,
						"acc_stderr,none": 0.00693876922137912,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.93847695318919,
						"perplexity_stderr,none": 0.8284725459264435
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5092179313021541,
						"acc_stderr,none": 0.0069647937547564305,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.701678460462887,
						"perplexity_stderr,none": 1.214082776442715
					},
					"lambada_standard": {
						"acc,none": 0.6774694352804191,
						"acc_stderr,none": 0.006512419447011694,
						"alias": " - lambada_standard",
						"perplexity,none": 4.0687940080085445,
						"perplexity_stderr,none": 0.08342530295936354
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03958858917135649,
						"acc_stderr,none": 0.002716600279140424,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 725.3551820392277,
						"perplexity_stderr,none": 22.858564440599476
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.2872503840245776,
						"acc_norm_stderr,none": 0.017747701948846593,
						"acc_stderr,none": 0.01674276693510144,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.26145038167938933,
						"acc_norm,none": 0.2907124681933842,
						"acc_norm_stderr,none": 0.011456577557813217,
						"acc_stderr,none": 0.011086549147132491,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2556113902847571,
						"acc_norm,none": 0.2633165829145729,
						"acc_norm_stderr,none": 0.008062693356094492,
						"acc_stderr,none": 0.00798528739784743,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.43433594577420037,
						"acc_stderr,none": 0.005101327183198691,
						"alias": "mc_taco",
						"f1,none": 0.5352823457756896,
						"f1_stderr,none": 0.005645437385990162
					},
					"medmcqa": {
						"acc,none": 0.3449677265120727,
						"acc_norm,none": 0.3449677265120727,
						"acc_norm_stderr,none": 0.007350697793603448,
						"acc_stderr,none": 0.007350697793603448,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3511390416339356,
						"acc_norm,none": 0.3511390416339356,
						"acc_norm_stderr,none": 0.013383565413283813,
						"acc_stderr,none": 0.013383565413283813,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.39032901296111666,
						"acc_stderr,none": 0.08464436301362857,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4222222222222222,
						"acc_stderr,none": 0.04266763404099582,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.039397364351956274,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.02989060968628663,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.041227287076512825,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958549,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.03148955829745528,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4068965517241379,
						"acc_stderr,none": 0.04093793981266237,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30687830687830686,
						"acc_stderr,none": 0.023752928712112136,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.04190596438871136,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45806451612903226,
						"acc_stderr,none": 0.028343787250540636,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.28078817733990147,
						"acc_stderr,none": 0.0316185633535861,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5212121212121212,
						"acc_stderr,none": 0.03900828913737301,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4494949494949495,
						"acc_stderr,none": 0.03544132491947969,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.538860103626943,
						"acc_stderr,none": 0.035975244117345775,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.35128205128205126,
						"acc_stderr,none": 0.024203665177902803,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.027940457136228405,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.37815126050420167,
						"acc_stderr,none": 0.031499305777849054,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.03603038545360384,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.48807339449541287,
						"acc_stderr,none": 0.021431223617362233,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.27314814814814814,
						"acc_stderr,none": 0.03038805130167812,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4852941176470588,
						"acc_stderr,none": 0.035077938347913236,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.540084388185654,
						"acc_stderr,none": 0.03244246810187913,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4125560538116592,
						"acc_stderr,none": 0.03304062175449297,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5114503816793893,
						"acc_stderr,none": 0.043841400240780176,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3640807651434644,
						"acc_stderr,none": 0.08496979813396047,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4049586776859504,
						"acc_stderr,none": 0.04481137755942469,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4351851851851852,
						"acc_stderr,none": 0.04792898170907062,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3619631901840491,
						"acc_stderr,none": 0.037757007291414416,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.04948637324026637,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.03193705726200293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5721583652618135,
						"acc_stderr,none": 0.01769278792780373,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3554913294797688,
						"acc_stderr,none": 0.025770292082977247,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21899441340782122,
						"acc_stderr,none": 0.013831676687303203,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.41830065359477125,
						"acc_stderr,none": 0.028245134024387285,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4431927904731252,
						"acc_stderr,none": 0.08949673726168551,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4790996784565916,
						"acc_stderr,none": 0.028373270961069414,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4506172839506173,
						"acc_stderr,none": 0.02768472141565619,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902013,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3200782268578879,
						"acc_stderr,none": 0.011914791947638503,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.41544117647058826,
						"acc_stderr,none": 0.029935342707877743,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.39052287581699346,
						"acc_stderr,none": 0.019737008998094607,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907297,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3306122448979592,
						"acc_stderr,none": 0.030116426296540603,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.42671433214169646,
						"acc_stderr,none": 0.0709954903179147,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5422885572139303,
						"acc_stderr,none": 0.035228658640995975,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3418966064065969,
						"acc_stderr,none": 0.06782984644227069,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5789473684210527,
						"acc_stderr,none": 0.037867207062342145,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7802343352012227,
						"acc_stderr,none": 0.004179933984206167,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7789869812855981,
						"acc_stderr,none": 0.004184804557506983,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.02186830575426217,
						"alias": "mrpc",
						"f1,none": 0.8338461538461538,
						"f1_stderr,none": 0.015750089856866507
					},
					"multimedqa": {
						"acc,none": 0.37885024840312276,
						"acc_norm,none": 0.3453393240464078,
						"acc_norm_stderr,none": 0.00010225001159147576,
						"acc_stderr,none": 0.08990379276638243,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.47215346534653463,
						"acc_stderr,none": 0.007170656491985979,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7016553815883115,
						"mrr_stderr,none": 0.010362148757279028,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4153498871331828,
						"r@2_stderr,none": 0.016564694549772725
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6531226501776994,
						"mrr_stderr,none": 0.010395763018140658,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4762979683972912,
						"r@2_stderr,none": 0.016788421275515525
					},
					"openbookqa": {
						"acc,none": 0.298,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020475118092988975,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.449,
						"acc_stderr,none": 0.011124809242874425,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.010878012942757032,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.011095423796079503,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003715,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5395,
						"acc_stderr,none": 0.011148184426533288,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5145,
						"acc_stderr,none": 0.011178432523249468,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4883571428571429,
						"acc_stderr,none": 0.050400093125208184,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7736670293797606,
						"acc_norm,none": 0.7872687704026116,
						"acc_norm_stderr,none": 0.009548223123047345,
						"acc_stderr,none": 0.009763294246879427,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24407557643040137,
						"acc_norm,none": 0.290296754910333,
						"acc_norm_stderr,none": 0.0033161383211952303,
						"acc_stderr,none": 0.0031381558044888797,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290778,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7311814892579026,
						"acc_norm,none": 0.6465023477418785,
						"acc_norm_stderr,none": 0.010002717577357463,
						"acc_stderr,none": 0.15560356723689256,
						"alias": "pythia",
						"bits_per_byte,none": 0.6345998198671968,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5525070500726705,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.310458832487118,
						"perplexity_stderr,none": 0.06455342172655767,
						"word_perplexity,none": 10.508546370052967,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.0597660080799269,
						"acc_stderr,none": 0.050531397619411646,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.475,
						"acc_norm,none": 0.5583333333333333,
						"acc_norm_stderr,none": 0.04552192400253556,
						"acc_stderr,none": 0.04577759534198058,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.46875,
						"acc_norm_stderr,none": 0.039575057062617526,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064893,
						"acc_stderr,none": 0.029095492917064907,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7417017066534751,
						"acc_stderr,none": 0.002176852023284019,
						"alias": "qqp",
						"f1,none": 0.682419487273059,
						"f1_stderr,none": 0.0029498771447202658
					},
					"race": {
						"acc,none": 0.3406698564593301,
						"acc_stderr,none": 0.014667904380876565,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2712,
						"em_stderr,none": 0.004446013124505284,
						"f1,none": 0.28069523833990095,
						"f1_stderr,none": 0.004456802402157604
					},
					"rte": {
						"acc,none": 0.6823104693140795,
						"acc_stderr,none": 0.028024503562454613,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.959,
						"acc_norm,none": 0.956,
						"acc_norm_stderr,none": 0.006488921798427418,
						"acc_stderr,none": 0.006273624021118775,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8841743119266054,
						"acc_stderr,none": 0.010843320972287813,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5777266819954013,
						"acc_norm,none": 0.7698690392882135,
						"acc_norm_stderr,none": 0.002975957157190495,
						"acc_stderr,none": 0.003492116754268531,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7054673721340388,
						"acc_stderr,none": 0.07219777188751882,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6506410256410257,
						"acc_stderr,none": 0.004771733037470924,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8974358974358975,
						"acc_stderr,none": 0.0030544207066755817,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5734313725490197,
						"acc_stderr,none": 0.004897296343312118,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3233447259750329,
						"acc_stderr,none": 0.001482427995604895,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.5282040758627256,
						"bleu_diff_stderr,none": 0.8642504936435088,
						"bleu_max,none": 26.477625970727555,
						"bleu_max_stderr,none": 0.8046493977366398,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.0157021070906279,
						"rouge1_diff,none": -9.61504869525477,
						"rouge1_diff_stderr,none": 0.9513326849526342,
						"rouge1_max,none": 51.72074865390213,
						"rouge1_max_stderr,none": 0.8841677319956579,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283361,
						"rouge2_diff,none": -11.761825712496476,
						"rouge2_diff_stderr,none": 1.146784137451743,
						"rouge2_max,none": 35.375626448829145,
						"rouge2_max_stderr,none": 1.0301814558490707,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.949937602219535,
						"rougeL_diff_stderr,none": 0.9687235296572093,
						"rougeL_max,none": 48.82845988968157,
						"rougeL_max_stderr,none": 0.9011067554350052
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.5282040758627256,
						"bleu_diff_stderr,none": 0.8642504936435088,
						"bleu_max,none": 26.477625970727555,
						"bleu_max_stderr,none": 0.8046493977366398,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.0157021070906279,
						"rouge1_diff,none": -9.61504869525477,
						"rouge1_diff_stderr,none": 0.9513326849526342,
						"rouge1_max,none": 51.72074865390213,
						"rouge1_max_stderr,none": 0.8841677319956579,
						"rouge2_acc,none": 0.2558139534883721,
						"rouge2_acc_stderr,none": 0.015274176219283361,
						"rouge2_diff,none": -11.761825712496476,
						"rouge2_diff_stderr,none": 1.146784137451743,
						"rouge2_max,none": 35.375626448829145,
						"rouge2_max_stderr,none": 1.0301814558490707,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.949937602219535,
						"rougeL_diff_stderr,none": 0.9687235296572093,
						"rougeL_max,none": 48.82845988968157,
						"rougeL_max_stderr,none": 0.9011067554350052
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2521419828641371,
						"acc_stderr,none": 0.015201522246299969,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39454746908592875,
						"acc_stderr,none": 0.014055767017763458,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.09793307086614174,
						"exact_match_stderr,none": 0.006595217240281941
					},
					"wic": {
						"acc,none": 0.5736677115987461,
						"acc_stderr,none": 0.01959451867527903,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.634581933282372,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5524878021539386,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.50784969875171,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7040252565114443,
						"acc_stderr,none": 0.012829348226339011,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.048346889526540184,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8461538461538461,
						"acc_stderr,none": 0.021876786884404677,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6187272727272727,
						"acc_stderr,none": 0.06898295782972137,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.02203367799374087,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.02043534209189613,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.019966103540279462,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928866,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345393,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4359571619812584,
						"acc_stderr,none": 0.050701097013861586,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.010015524156629808,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.48032128514056227,
						"acc_stderr,none": 0.010014307727112707,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673286,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5224899598393574,
						"acc_stderr,none": 0.010011929439393996,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5012048192771085,
						"acc_stderr,none": 0.010022043771315572,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5016064257028112,
						"acc_stderr,none": 0.010022021141102096,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994484,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3887550200803213,
						"acc_stderr,none": 0.009770869423441492,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42168674698795183,
						"acc_stderr,none": 0.009898379493335442,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45180722891566266,
						"acc_stderr,none": 0.009975410845717854,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41244979919678715,
						"acc_stderr,none": 0.009867237678555588,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3931726907630522,
						"acc_stderr,none": 0.009790655797269846,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.00955664246013815,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6256542927621683,
						"acc_stderr,none": 0.06133943754874467,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.01267071629096672,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7088021178027796,
						"acc_stderr,none": 0.011691443511878192,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5704831237590999,
						"acc_stderr,none": 0.012738639381353993,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751377,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.012162974996136392,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245279,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6737260092653872,
						"acc_stderr,none": 0.012065474625979056,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293373,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6267372600926538,
						"acc_stderr,none": 0.01244691155352713,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8165879973027647,
						"acc_stderr,none": 0.03640734210263105,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8756989247311828,
						"acc_stderr,none": 0.006843791800722096,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.048740641331093675,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7570385818561001,
						"acc_stderr,none": 0.013856224434217395,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7642585551330798,
						"acc_stderr,none": 0.02622330820622253,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6761904761904762,
						"acc_stderr,none": 0.02640672299673,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7857142857142857,
						"acc_stderr,none": 0.018295527755776197,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C1-rwkv-390_pth"
	},
	"./rwkv-x-dev/1_3-C2-rwkv-250_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6364148816234498,
						"acc_norm,none": 0.6366967305524239,
						"acc_norm_stderr,none": 0.08909004492334426,
						"acc_stderr,none": 0.1068815699771854,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4871875,
						"acc_stderr,none": 0.04751611240877701,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0445,
						"acc_stderr,none": 0.0409549629492404,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.811089552238806,
						"acc_stderr,none": 0.1724354050249065,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2830609212481426,
						"acc_norm,none": 0.2830609212481426,
						"acc_norm_stderr,none": 0.1310669643028631,
						"acc_stderr,none": 0.1310669643028631,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3155758936280435,
						"acc_norm,none": 0.3155758936280435,
						"acc_norm_stderr,none": 0.06043464901317516,
						"acc_stderr,none": 0.06043464901317516,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.412865235539654,
						"likelihood_diff_stderr,none": 0.500151919189628,
						"pct_stereotype,none": 0.6025641025641025,
						"pct_stereotype_stderr,none": 0.07164851760982685
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1171259842519685,
						"exact_match_stderr,none": 0.007135444500870198
					},
					"glue": {
						"acc,none": 0.7412756074321105,
						"acc_stderr,none": 0.004299458968138233,
						"alias": "glue",
						"f1,none": 0.6962593018323248,
						"f1_stderr,none": 0.00016282719694904701,
						"mcc,none": 0.2816000061659921,
						"mcc_stderr,none": 0.029955045292457825
					},
					"kmmlu": {
						"acc,none": 0.29745885070747896,
						"acc_norm,none": 0.29745885070747896,
						"acc_norm_stderr,none": 0.03218073930545326,
						"acc_stderr,none": 0.03218073930545326,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5242271431703573,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502988,
						"acc_stderr,none": 0.04314707203196712,
						"alias": "kobest",
						"f1,none": 0.4235645891948346,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.017511765798143505,
						"alias": "lambada",
						"perplexity,none": 3.6535139116335005,
						"perplexity_stderr,none": 0.18871387326676559
					},
					"lambada_cloze": {
						"acc,none": 0.02852707160877159,
						"acc_stderr,none": 0.0026398219610884563,
						"alias": "lambada_cloze",
						"perplexity,none": 563.4337457991705,
						"perplexity_stderr,none": 121.96836321126048
					},
					"lambada_multilingual": {
						"acc,none": 0.5405394915583156,
						"acc_stderr,none": 0.0846117306162093,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.86571150485598,
						"perplexity_stderr,none": 8.150236500814422
					},
					"mmlu": {
						"acc,none": 0.3946019085600342,
						"acc_stderr,none": 0.08539276370984222,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.369394261424017,
						"acc_stderr,none": 0.08311216866103506,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4451239137431606,
						"acc_stderr,none": 0.08563176931775165,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4387390315242119,
						"acc_stderr,none": 0.06996070742463358,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3393593403108151,
						"acc_stderr,none": 0.07698857772273254,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.38183108587650816,
						"acc_norm,none": 0.35166449926396326,
						"acc_norm_stderr,none": 0.00011553511229547551,
						"acc_stderr,none": 0.08452727048470678,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48542857142857143,
						"acc_stderr,none": 0.052647164906520216,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7344689678890879,
						"acc_norm,none": 0.6408365677485831,
						"acc_norm_stderr,none": 0.0099957622064333,
						"acc_stderr,none": 0.16090150899679626,
						"alias": "pythia",
						"bits_per_byte,none": 0.6332968939538636,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5511055837419194,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3065743677448243,
						"perplexity_stderr,none": 0.06447553247686984,
						"word_perplexity,none": 10.457918885929645,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3882978723404255,
						"acc_norm,none": 0.4397163120567376,
						"acc_norm_stderr,none": 0.05922186888158541,
						"acc_stderr,none": 0.04593587745589221,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.689993677415061,
						"acc_stderr,none": 0.06865731276455735,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3252621810611377,
						"acc_stderr,none": 0.0014219022311066855,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -7.984552716030973,
						"bleu_diff_stderr,none": 0.8609552262410113,
						"bleu_max,none": 26.513491067585093,
						"bleu_max_stderr,none": 0.7987765319759287,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842885,
						"rouge1_diff,none": -9.950793269593063,
						"rouge1_diff_stderr,none": 0.9386833036851832,
						"rouge1_max,none": 52.022729904553884,
						"rouge1_max_stderr,none": 0.8649333366072448,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707696,
						"rouge2_diff,none": -12.01821276266762,
						"rouge2_diff_stderr,none": 1.1294865215426677,
						"rouge2_max,none": 35.82862702274702,
						"rouge2_max_stderr,none": 1.0167634694388548,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842881,
						"rougeL_diff,none": -10.408699238302749,
						"rougeL_diff_stderr,none": 0.9535276857579184,
						"rougeL_max,none": 49.06780650591617,
						"rougeL_max_stderr,none": 0.8810583542317346
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07145433936784358,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43777777777777777,
						"acc_stderr,none": 0.05138594243757625,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6294446784188676,
						"acc_stderr,none": 0.060553599861075065,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8186109238031019,
						"acc_stderr,none": 0.037777648582528064,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6364148816234498,
						"acc_norm,none": 0.6366967305524239,
						"acc_norm_stderr,none": 0.08909004492334426,
						"acc_stderr,none": 0.1068815699771854,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4871875,
						"acc_stderr,none": 0.04751611240877701,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.015594460144140601,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.01574623586588068,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.43583333333333335,
						"acc_stderr,none": 0.014320373365192124,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4104095563139932,
						"acc_norm,none": 0.44880546075085326,
						"acc_norm_stderr,none": 0.01453459958509767,
						"acc_stderr,none": 0.01437492219264266,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.747895622895623,
						"acc_norm,none": 0.7293771043771043,
						"acc_norm_stderr,none": 0.009116466166403827,
						"acc_stderr,none": 0.008910024163218188,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0445,
						"acc_stderr,none": 0.0409549629492404,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.013,
						"acc_stderr,none": 0.002533517190523322,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.165,
						"acc_stderr,none": 0.00830192513700815,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.08,
						"acc_stderr,none": 0.006067817499282812,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0195,
						"acc_stderr,none": 0.003092678018912415,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0125,
						"acc_stderr,none": 0.00248494717876267,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0595,
						"acc_stderr,none": 0.005290923542820115,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154647075,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0415,
						"acc_stderr,none": 0.0044608098381578925,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000169,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0505,
						"acc_stderr,none": 0.004897639067368751,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0021691973969631237,
						"acc_stderr,none": 0.0009692521054558653,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.811089552238806,
						"acc_stderr,none": 0.1724354050249065,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752505,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611466,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448804,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304398,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942328,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.785,
						"acc_stderr,none": 0.01299784381903185,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.015712507211864207,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122361,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042977,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029885,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.0077997330618320435,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.00676326413366667,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.00708810561724644,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653909,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796377,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397247,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632895,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095526,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697601,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108665,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306495,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.015712507211864207,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248128,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117714,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.015649789644462217,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801108,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031514,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.682,
						"acc_stderr,none": 0.014734079309311901,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785134,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.321,
						"acc_stderr,none": 0.014770821817934647,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504387,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.015815471195292686,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662721,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783222,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967218,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333456,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103312,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230177,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.513,
						"acc_stderr,none": 0.015813952101896626,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970735,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.01074366913239734,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611463,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.01568917302314407,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747403,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.0096580162185243,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.01552498067712258,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0109781838443578,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747391,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008218,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230194,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557421,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333443,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318192,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275287,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.015195720118175124,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.289,
						"acc_stderr,none": 0.014341711358296181,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7033639143730887,
						"acc_stderr,none": 0.007989039569104808,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8392857142857143,
						"acc_stderr,none": 0.049522300593062986,
						"alias": "cb",
						"f1,none": 0.6750864689235928,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2830609212481426,
						"acc_norm,none": 0.2830609212481426,
						"acc_norm_stderr,none": 0.1310669643028631,
						"acc_stderr,none": 0.1310669643028631,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.0687296045180637,
						"acc_stderr,none": 0.0687296045180637,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.060606060606060594,
						"acc_stderr,none": 0.060606060606060594,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502246,
						"acc_stderr,none": 0.07401656182502246,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4318181818181818,
						"acc_norm,none": 0.4318181818181818,
						"acc_norm_stderr,none": 0.07553702921752882,
						"acc_stderr,none": 0.07553702921752882,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.07275304578557182,
						"acc_stderr,none": 0.07275304578557182,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3155758936280435,
						"acc_norm,none": 0.3155758936280435,
						"acc_norm_stderr,none": 0.06043464901317516,
						"acc_stderr,none": 0.06043464901317516,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456226,
						"acc_stderr,none": 0.03579526516456226,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.43125,
						"acc_norm,none": 0.43125,
						"acc_norm_stderr,none": 0.03927594984018918,
						"acc_stderr,none": 0.03927594984018918,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3349282296650718,
						"acc_norm,none": 0.3349282296650718,
						"acc_norm_stderr,none": 0.03272491043051241,
						"acc_stderr,none": 0.03272491043051241,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3560371517027864,
						"acc_norm,none": 0.3560371517027864,
						"acc_norm_stderr,none": 0.026683950692610883,
						"acc_stderr,none": 0.026683950692610883,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460388,
						"acc_stderr,none": 0.03256685484460388,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.39106145251396646,
						"acc_norm,none": 0.39106145251396646,
						"acc_norm_stderr,none": 0.03657625502786071,
						"acc_stderr,none": 0.03657625502786071,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4485981308411215,
						"acc_norm,none": 0.4485981308411215,
						"acc_norm_stderr,none": 0.04830698295619321,
						"acc_stderr,none": 0.04830698295619321,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.047304390228528934,
						"acc_stderr,none": 0.047304390228528934,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604903,
						"acc_stderr,none": 0.04176466758604903,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.027065504564389522,
						"acc_stderr,none": 0.027065504564389522,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3480392156862745,
						"acc_norm,none": 0.3480392156862745,
						"acc_norm_stderr,none": 0.03343311240488418,
						"acc_stderr,none": 0.03343311240488418,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209194,
						"acc_stderr,none": 0.03377310252209194,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.038142800826175154,
						"acc_stderr,none": 0.038142800826175154,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735707,
						"acc_stderr,none": 0.03881956126735707,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.34355828220858897,
						"acc_norm,none": 0.34355828220858897,
						"acc_norm_stderr,none": 0.03731133519673893,
						"acc_stderr,none": 0.03731133519673893,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.0356654553808481,
						"acc_stderr,none": 0.0356654553808481,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683867,
						"acc_stderr,none": 0.028145741115683867,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.30808080808080807,
						"acc_norm,none": 0.30808080808080807,
						"acc_norm_stderr,none": 0.03289477330098614,
						"acc_stderr,none": 0.03289477330098614,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.44537815126050423,
						"acc_norm,none": 0.44537815126050423,
						"acc_norm_stderr,none": 0.032284106267163895,
						"acc_stderr,none": 0.032284106267163895,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930125,
						"acc_stderr,none": 0.028009647070930125,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.038850042458002554,
						"acc_stderr,none": 0.038850042458002554,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.33557046979865773,
						"acc_norm,none": 0.33557046979865773,
						"acc_norm_stderr,none": 0.03881373830315734,
						"acc_stderr,none": 0.03881373830315734,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233627,
						"acc_stderr,none": 0.04222776832233627,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940589,
						"acc_stderr,none": 0.04265792110940589,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.04134913018303316,
						"acc_stderr,none": 0.04134913018303316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37209302325581395,
						"acc_norm,none": 0.37209302325581395,
						"acc_norm_stderr,none": 0.03696369368553605,
						"acc_stderr,none": 0.03696369368553605,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995075,
						"acc_stderr,none": 0.022050254355995075,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.033921125520669684,
						"acc_stderr,none": 0.033921125520669684,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.041832732587876245,
						"acc_stderr,none": 0.041832732587876245,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.0420996926731014,
						"acc_stderr,none": 0.0420996926731014,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3761904761904762,
						"acc_norm,none": 0.3761904761904762,
						"acc_norm_stderr,none": 0.03350863645112523,
						"acc_stderr,none": 0.03350863645112523,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.03508485373860692,
						"acc_stderr,none": 0.03508485373860692,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3544973544973545,
						"acc_norm,none": 0.3544973544973545,
						"acc_norm_stderr,none": 0.034888041308433954,
						"acc_stderr,none": 0.034888041308433954,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.04207160755584019,
						"acc_stderr,none": 0.04207160755584019,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.03878352372138622,
						"acc_stderr,none": 0.03878352372138622,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.30857142857142855,
						"acc_norm,none": 0.30857142857142855,
						"acc_norm_stderr,none": 0.035016835199101176,
						"acc_stderr,none": 0.035016835199101176,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26861702127659576,
						"acc_norm,none": 0.26861702127659576,
						"acc_norm_stderr,none": 0.022888827968077056,
						"acc_stderr,none": 0.022888827968077056,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.36637931034482757,
						"acc_norm,none": 0.36637931034482757,
						"acc_norm_stderr,none": 0.031701087100596985,
						"acc_stderr,none": 0.031701087100596985,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3563218390804598,
						"acc_norm,none": 0.3563218390804598,
						"acc_norm_stderr,none": 0.036410995772554904,
						"acc_stderr,none": 0.036410995772554904,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.34513274336283184,
						"acc_norm,none": 0.34513274336283184,
						"acc_norm_stderr,none": 0.031694102698674474,
						"acc_stderr,none": 0.031694102698674474,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.34545454545454546,
						"acc_norm,none": 0.34545454545454546,
						"acc_norm_stderr,none": 0.03713158067481913,
						"acc_stderr,none": 0.03713158067481913,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3431952662721893,
						"acc_norm,none": 0.3431952662721893,
						"acc_norm_stderr,none": 0.03662976569681104,
						"acc_stderr,none": 0.03662976569681104,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.35403726708074534,
						"acc_norm,none": 0.35403726708074534,
						"acc_norm_stderr,none": 0.03780665290318812,
						"acc_stderr,none": 0.03780665290318812,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2816000061659921,
						"mcc_stderr,none": 0.029955045292457825
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.412865235539654,
						"likelihood_diff_stderr,none": 0.500151919189628,
						"pct_stereotype,none": 0.6025641025641025,
						"pct_stereotype_stderr,none": 0.07164851760982685
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6115086463923673,
						"likelihood_diff_stderr,none": 0.08573210091477856,
						"pct_stereotype,none": 0.6326774001192605,
						"pct_stereotype_stderr,none": 0.011775462623040696
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.100274725274725,
						"likelihood_diff_stderr,none": 0.3851754382213633,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.454545454545454,
						"likelihood_diff_stderr,none": 1.8230899661819735,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.842307692307692,
						"likelihood_diff_stderr,none": 0.6171275647813974,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.59921875,
						"likelihood_diff_stderr,none": 0.1606151555747731,
						"pct_stereotype,none": 0.615625,
						"pct_stereotype_stderr,none": 0.027235813331371494
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.564236111111111,
						"likelihood_diff_stderr,none": 0.23498741646071672,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8559027777777777,
						"likelihood_diff_stderr,none": 0.3143398472630477,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3676181102362204,
						"likelihood_diff_stderr,none": 0.14732041142677213,
						"pct_stereotype,none": 0.5413385826771654,
						"pct_stereotype_stderr,none": 0.022129755490549064
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5563063063063063,
						"likelihood_diff_stderr,none": 0.3415190850739457,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.955645161290323,
						"likelihood_diff_stderr,none": 0.4335338539665626,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1947368421052635,
						"likelihood_diff_stderr,none": 0.24733583152726782,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.034104864353344894
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.2129919499105544,
						"likelihood_diff_stderr,none": 0.07330623835210776,
						"pct_stereotype,none": 0.5724508050089445,
						"pct_stereotype_stderr,none": 0.012084400901134945
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.165277777777778,
						"likelihood_diff_stderr,none": 0.3069995177147646,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8461538461538463,
						"likelihood_diff_stderr,none": 0.9663979766688592,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.78030303030303,
						"likelihood_diff_stderr,none": 0.3906137642305829,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.680295950155763,
						"likelihood_diff_stderr,none": 0.12402583786763484,
						"pct_stereotype,none": 0.6074766355140186,
						"pct_stereotype_stderr,none": 0.027297480012962474
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.433794466403162,
						"likelihood_diff_stderr,none": 0.19851278867673497,
						"pct_stereotype,none": 0.40711462450592883,
						"pct_stereotype_stderr,none": 0.03094877404932307
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.673611111111111,
						"likelihood_diff_stderr,none": 0.44305077238993634,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.792391304347826,
						"likelihood_diff_stderr,none": 0.13485778435929513,
						"pct_stereotype,none": 0.4608695652173913,
						"pct_stereotype_stderr,none": 0.023266421758066525
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.408695652173913,
						"likelihood_diff_stderr,none": 0.28985073413992113,
						"pct_stereotype,none": 0.7565217391304347,
						"pct_stereotype_stderr,none": 0.040196512608780724
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.631868131868132,
						"likelihood_diff_stderr,none": 0.30605760157924905,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8536352040816326,
						"likelihood_diff_stderr,none": 0.24517895679957616,
						"pct_stereotype,none": 0.7295918367346939,
						"pct_stereotype_stderr,none": 0.03180772269593479
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1171259842519685,
						"exact_match_stderr,none": 0.007135444500870198
					},
					"glue": {
						"acc,none": 0.7412756074321105,
						"acc_stderr,none": 0.004299458968138233,
						"alias": "glue",
						"f1,none": 0.6962593018323248,
						"f1_stderr,none": 0.00016282719694904701,
						"mcc,none": 0.2816000061659921,
						"mcc_stderr,none": 0.029955045292457825
					},
					"hellaswag": {
						"acc,none": 0.5375423222465644,
						"acc_norm,none": 0.7234614618601872,
						"acc_norm_stderr,none": 0.004463721071319086,
						"acc_stderr,none": 0.004975696076240848,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.29745885070747896,
						"acc_norm,none": 0.29745885070747896,
						"acc_norm_stderr,none": 0.03218073930545326,
						"acc_stderr,none": 0.03218073930545326,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.0144568322948011,
						"acc_stderr,none": 0.0144568322948011,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543143,
						"acc_stderr,none": 0.014512395033543143,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434951,
						"acc_stderr,none": 0.014221154708434951,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25666666666666665,
						"acc_norm,none": 0.25666666666666665,
						"acc_norm_stderr,none": 0.017846913889347057,
						"acc_stderr,none": 0.017846913889347057,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738859,
						"acc_stderr,none": 0.014806864733738859,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.015090650341444233,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.014965960710224479,
						"acc_stderr,none": 0.014965960710224479,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03248501780682212,
						"acc_stderr,none": 0.03248501780682212,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632896,
						"acc_stderr,none": 0.014632638658632896,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2692307692307692,
						"acc_norm,none": 0.2692307692307692,
						"acc_norm_stderr,none": 0.03905328918744187,
						"acc_stderr,none": 0.03905328918744187,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.04648231987117316,
						"acc_stderr,none": 0.04648231987117316,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950432,
						"acc_stderr,none": 0.014553205687950432,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795023,
						"acc_stderr,none": 0.014944140233795023,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314142,
						"acc_stderr,none": 0.013644675781314142,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722695,
						"acc_stderr,none": 0.014645596385722695,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231561,
						"acc_stderr,none": 0.014326941797231561,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950436,
						"acc_stderr,none": 0.014553205687950436,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.014696631960792511,
						"acc_stderr,none": 0.014696631960792511,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768078,
						"acc_stderr,none": 0.04408440022768078,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087964,
						"acc_stderr,none": 0.014683991951087964,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.015090650341444233,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905687,
						"acc_stderr,none": 0.014356395999905687,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881418,
						"acc_stderr,none": 0.013588548437881418,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664392,
						"acc_stderr,none": 0.014566646394664392,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2633333333333333,
						"acc_norm,none": 0.2633333333333333,
						"acc_norm_stderr,none": 0.01799595989202964,
						"acc_stderr,none": 0.01799595989202964,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485256,
						"acc_stderr,none": 0.014174516461485256,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008224,
						"acc_stderr,none": 0.014414290540008224,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.341,
						"acc_norm,none": 0.341,
						"acc_norm_stderr,none": 0.01499813134840272,
						"acc_stderr,none": 0.01499813134840272,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.25333333333333335,
						"acc_norm,none": 0.25333333333333335,
						"acc_norm_stderr,none": 0.02515208293771192,
						"acc_stderr,none": 0.02515208293771192,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490528,
						"acc_stderr,none": 0.014127086556490528,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543157,
						"acc_stderr,none": 0.014512395033543157,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543153,
						"acc_stderr,none": 0.014512395033543153,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.03128528159088721,
						"acc_stderr,none": 0.03128528159088721,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.01401329270272948,
						"acc_stderr,none": 0.01401329270272948,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.01426700906103131,
						"acc_stderr,none": 0.01426700906103131,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.030488073292114216,
						"acc_stderr,none": 0.030488073292114216,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.365,
						"acc_norm,none": 0.365,
						"acc_norm_stderr,none": 0.015231776226264905,
						"acc_stderr,none": 0.015231776226264905,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5242271431703573,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.0004937875751502988,
						"acc_stderr,none": 0.04314707203196712,
						"alias": "kobest",
						"f1,none": 0.4235645891948346,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5170940170940171,
						"acc_stderr,none": 0.013340964053138492,
						"alias": " - kobest_boolq",
						"f1,none": 0.36910000954390726,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.623,
						"acc_stderr,none": 0.015333170125779847,
						"alias": " - kobest_copa",
						"f1,none": 0.6217711344157166,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.438,
						"acc_norm,none": 0.56,
						"acc_norm_stderr,none": 0.022221331534143015,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.43242888311135436,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.025102898696363056,
						"alias": " - kobest_sentineg",
						"f1,none": 0.39928962793068296,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3310778727445394,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.017511765798143505,
						"alias": "lambada",
						"perplexity,none": 3.6535139116335005,
						"perplexity_stderr,none": 0.18871387326676559
					},
					"lambada_cloze": {
						"acc,none": 0.02852707160877159,
						"acc_stderr,none": 0.0026398219610884563,
						"alias": "lambada_cloze",
						"perplexity,none": 563.4337457991705,
						"perplexity_stderr,none": 121.96836321126048
					},
					"lambada_multilingual": {
						"acc,none": 0.5405394915583156,
						"acc_stderr,none": 0.0846117306162093,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.86571150485598,
						"perplexity_stderr,none": 8.150236500814422
					},
					"lambada_openai": {
						"acc,none": 0.7463613429070445,
						"acc_stderr,none": 0.006061698956508256,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3065743677448243,
						"perplexity_stderr,none": 0.06447553247686984
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.026004269357655735,
						"acc_stderr,none": 0.002217241136095807,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 322.67596485237954,
						"perplexity_stderr,none": 10.234987688528612
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4292645061129439,
						"acc_stderr,none": 0.006895916655437432,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.16979539785279,
						"perplexity_stderr,none": 1.888549660989399
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7469435280419173,
						"acc_stderr,none": 0.006057099133599549,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3054080070412657,
						"perplexity_stderr,none": 0.06431752960125509
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45992625654958275,
						"acc_stderr,none": 0.006943568216279233,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.719359163325066,
						"perplexity_stderr,none": 1.408045326101042
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5503590141665049,
						"acc_stderr,none": 0.006930555736225025,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.43941474508528,
						"perplexity_stderr,none": 0.7979328629768947
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5162041529206287,
						"acc_stderr,none": 0.006962318518940985,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.69458021097549,
						"perplexity_stderr,none": 1.1457849101533033
					},
					"lambada_standard": {
						"acc,none": 0.6825150397826509,
						"acc_stderr,none": 0.006485295423198008,
						"alias": " - lambada_standard",
						"perplexity,none": 4.000949149444625,
						"perplexity_stderr,none": 0.08197955697609033
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.031049873859887445,
						"acc_stderr,none": 0.0024165328581617737,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 804.1915267459616,
						"perplexity_stderr,none": 25.747151133668346
					},
					"logiqa": {
						"acc,none": 0.2488479262672811,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399515,
						"acc_stderr,none": 0.016957985904525578,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2582697201017812,
						"acc_norm,none": 0.28880407124681934,
						"acc_norm_stderr,none": 0.011434263441269488,
						"acc_stderr,none": 0.011042608058378027,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2458961474036851,
						"acc_norm,none": 0.2509212730318258,
						"acc_norm_stderr,none": 0.007936573884076006,
						"acc_stderr,none": 0.007883009185091524,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.416437195509426,
						"acc_stderr,none": 0.005073522130552187,
						"alias": "mc_taco",
						"f1,none": 0.5279300891021247,
						"f1_stderr,none": 0.005625939483830985
					},
					"medmcqa": {
						"acc,none": 0.34377241214439397,
						"acc_norm,none": 0.34377241214439397,
						"acc_norm_stderr,none": 0.007344643805753146,
						"acc_stderr,none": 0.007344643805753146,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3715632364493323,
						"acc_norm,none": 0.3715632364493323,
						"acc_norm_stderr,none": 0.013548886496306757,
						"acc_stderr,none": 0.013548886496306757,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3946019085600342,
						"acc_stderr,none": 0.08539276370984222,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4148148148148148,
						"acc_stderr,none": 0.042561937679014075,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.40131578947368424,
						"acc_stderr,none": 0.039889037033362836,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3849056603773585,
						"acc_stderr,none": 0.02994649856769995,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4027777777777778,
						"acc_stderr,none": 0.04101405519842426,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3583815028901734,
						"acc_stderr,none": 0.036563436533531585,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376556,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.05021167315686781,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3617021276595745,
						"acc_stderr,none": 0.031410821975962386,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436695,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4413793103448276,
						"acc_stderr,none": 0.04137931034482758,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.28835978835978837,
						"acc_stderr,none": 0.023330654054535882,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.042163702135578345,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4483870967741935,
						"acc_stderr,none": 0.028292056830112728,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.28078817733990147,
						"acc_stderr,none": 0.03161856335358611,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5393939393939394,
						"acc_stderr,none": 0.03892207016552013,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.03540294377095368,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5492227979274611,
						"acc_stderr,none": 0.035909109522355265,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.36923076923076925,
						"acc_stderr,none": 0.024468615241478923,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2851851851851852,
						"acc_stderr,none": 0.027528599210340496,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.031566630992154156,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.035118075718047245,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5174311926605505,
						"acc_stderr,none": 0.02142429187185315,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.24537037037037038,
						"acc_stderr,none": 0.02934666509437295,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.49019607843137253,
						"acc_stderr,none": 0.03508637358630573,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5274261603375527,
						"acc_stderr,none": 0.03249822718301303,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3811659192825112,
						"acc_stderr,none": 0.03259625118416827,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.04385162325601553,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.369394261424017,
						"acc_stderr,none": 0.08311216866103506,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4132231404958678,
						"acc_stderr,none": 0.04495087843548408,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.36809815950920244,
						"acc_stderr,none": 0.03789213935838396,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5048543689320388,
						"acc_stderr,none": 0.049505043821289195,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6239316239316239,
						"acc_stderr,none": 0.03173393632969481,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.561941251596424,
						"acc_stderr,none": 0.01774223223825724,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.36127167630057805,
						"acc_stderr,none": 0.025862201852277885,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21564245810055865,
						"acc_stderr,none": 0.013754835975482346,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.434640522875817,
						"acc_stderr,none": 0.028384256704883034,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4451239137431606,
						"acc_stderr,none": 0.08563176931775165,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4758842443729904,
						"acc_stderr,none": 0.028365041542564584,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4506172839506173,
						"acc_stderr,none": 0.0276847214156562,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2872340425531915,
						"acc_stderr,none": 0.026992199173064356,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3318122555410691,
						"acc_stderr,none": 0.012026088259897628,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4411764705882353,
						"acc_stderr,none": 0.030161911930767102,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4068627450980392,
						"acc_stderr,none": 0.019873802005061177,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3306122448979592,
						"acc_stderr,none": 0.030116426296540592,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4387390315242119,
						"acc_stderr,none": 0.06996070742463358,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5522388059701493,
						"acc_stderr,none": 0.035161847729521675,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3393593403108151,
						"acc_stderr,none": 0.07698857772273254,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6140350877192983,
						"acc_stderr,none": 0.03733756969066163,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7723892002037698,
						"acc_stderr,none": 0.004232446741789454,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7734947111472742,
						"acc_stderr,none": 0.004221521444855474,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": "mrpc",
						"f1,none": 0.8299531981279251,
						"f1_stderr,none": 0.01608930238120449
					},
					"multimedqa": {
						"acc,none": 0.38183108587650816,
						"acc_norm,none": 0.35166449926396326,
						"acc_norm_stderr,none": 0.00011553511229547551,
						"acc_stderr,none": 0.08452727048470678,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5408415841584159,
						"acc_stderr,none": 0.0071578040098146285,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7057938315731139,
						"mrr_stderr,none": 0.010396649515227518,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.40632054176072235,
						"r@2_stderr,none": 0.016509684167298446
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6553799865208145,
						"mrr_stderr,none": 0.010442154141010481,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4706546275395034,
						"r@2_stderr,none": 0.016778343895001428
					},
					"openbookqa": {
						"acc,none": 0.292,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020354375480530082,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.011129305041886325,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.010856285251628968,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.419,
						"acc_stderr,none": 0.01103541527062293,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.539,
						"acc_stderr,none": 0.011149065020234333,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5265,
						"acc_stderr,none": 0.011167418260963935,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48542857142857143,
						"acc_stderr,none": 0.052647164906520216,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7752992383025027,
						"acc_norm,none": 0.7889009793253536,
						"acc_norm_stderr,none": 0.00952137737873414,
						"acc_stderr,none": 0.009738282586548361,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23660333048676346,
						"acc_norm,none": 0.2843723313407344,
						"acc_norm_stderr,none": 0.0032957964550634875,
						"acc_stderr,none": 0.003104979295138653,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.02051442622562804,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7344689678890879,
						"acc_norm,none": 0.6408365677485831,
						"acc_norm_stderr,none": 0.0099957622064333,
						"acc_stderr,none": 0.16090150899679626,
						"alias": "pythia",
						"bits_per_byte,none": 0.6332968939538636,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5511055837419194,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3065743677448243,
						"perplexity_stderr,none": 0.06447553247686984,
						"word_perplexity,none": 10.457918885929645,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3882978723404255,
						"acc_norm,none": 0.4397163120567376,
						"acc_norm_stderr,none": 0.05922186888158541,
						"acc_stderr,none": 0.04593587745589221,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.4625,
						"acc_norm_stderr,none": 0.039540899134978165,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3908450704225352,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.02890817768804618,
						"acc_stderr,none": 0.029005007569909827,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.75767994063814,
						"acc_stderr,none": 0.002131037433272059,
						"alias": "qqp",
						"f1,none": 0.6950919672590333,
						"f1_stderr,none": 0.002927801148693936
					},
					"race": {
						"acc,none": 0.34832535885167465,
						"acc_stderr,none": 0.014745439038731609,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2688,
						"em_stderr,none": 0.004433578877404376,
						"f1,none": 0.27838523833453654,
						"f1_stderr,none": 0.004444793720597664
					},
					"rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170353,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.959,
						"acc_norm,none": 0.951,
						"acc_norm_stderr,none": 0.0068297617561409165,
						"acc_stderr,none": 0.006273624021118787,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6714801444043321,
						"acc_stderr,none": 0.028271109855219828,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8772935779816514,
						"acc_stderr,none": 0.01111724160326852,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5755773268019594,
						"acc_norm,none": 0.7699690092972108,
						"acc_norm_stderr,none": 0.0029755038724781423,
						"acc_stderr,none": 0.003494474287505035,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.689993677415061,
						"acc_stderr,none": 0.06865731276455735,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6255008012820513,
						"acc_stderr,none": 0.004844052551786333,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8724029593594811,
						"acc_stderr,none": 0.003358986902683246,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5766666666666667,
						"acc_stderr,none": 0.004892432713375922,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3252621810611377,
						"acc_stderr,none": 0.0014219022311066855,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -7.984552716030973,
						"bleu_diff_stderr,none": 0.8609552262410113,
						"bleu_max,none": 26.513491067585093,
						"bleu_max_stderr,none": 0.7987765319759287,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842885,
						"rouge1_diff,none": -9.950793269593063,
						"rouge1_diff_stderr,none": 0.9386833036851832,
						"rouge1_max,none": 52.022729904553884,
						"rouge1_max_stderr,none": 0.8649333366072448,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707696,
						"rouge2_diff,none": -12.01821276266762,
						"rouge2_diff_stderr,none": 1.1294865215426677,
						"rouge2_max,none": 35.82862702274702,
						"rouge2_max_stderr,none": 1.0167634694388548,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842881,
						"rougeL_diff,none": -10.408699238302749,
						"rougeL_diff_stderr,none": 0.9535276857579184,
						"rougeL_max,none": 49.06780650591617,
						"rougeL_max_stderr,none": 0.8810583542317346
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -7.984552716030973,
						"bleu_diff_stderr,none": 0.8609552262410113,
						"bleu_max,none": 26.513491067585093,
						"bleu_max_stderr,none": 0.7987765319759287,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842885,
						"rouge1_diff,none": -9.950793269593063,
						"rouge1_diff_stderr,none": 0.9386833036851832,
						"rouge1_max,none": 52.022729904553884,
						"rouge1_max_stderr,none": 0.8649333366072448,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707696,
						"rouge2_diff,none": -12.01821276266762,
						"rouge2_diff_stderr,none": 1.1294865215426677,
						"rouge2_max,none": 35.82862702274702,
						"rouge2_max_stderr,none": 1.0167634694388548,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842881,
						"rougeL_diff,none": -10.408699238302749,
						"rougeL_diff_stderr,none": 0.9535276857579184,
						"rougeL_max,none": 49.06780650591617,
						"rougeL_max_stderr,none": 0.8810583542317346
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2558139534883721,
						"acc_stderr,none": 0.01527417621928336,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39471040863390316,
						"acc_stderr,none": 0.01406282580660718,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.1171259842519685,
						"exact_match_stderr,none": 0.007135444500870202
					},
					"wic": {
						"acc,none": 0.5877742946708464,
						"acc_stderr,none": 0.019503076004019976,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6332968939538636,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5511055837419194,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.457918885929645,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7111286503551697,
						"acc_stderr,none": 0.012738241271018434,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4423076923076923,
						"acc_stderr,none": 0.04893740777701,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8717948717948718,
						"acc_stderr,none": 0.02027101064210495,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07145433936784358,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.021893529941665813,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628053,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070893,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628046,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43777777777777777,
						"acc_stderr,none": 0.05138594243757625,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4783132530120482,
						"acc_stderr,none": 0.010012641367065514,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409704,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.00977996757994179,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5285140562248996,
						"acc_stderr,none": 0.010005762674605274,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.510441767068273,
						"acc_stderr,none": 0.010019887205677444,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5048192771084338,
						"acc_stderr,none": 0.010021607322475494,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42730923694779116,
						"acc_stderr,none": 0.009915595034908124,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39558232931726905,
						"acc_stderr,none": 0.009801094347134985,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4036144578313253,
						"acc_stderr,none": 0.009834096424955396,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44899598393574297,
						"acc_stderr,none": 0.009969793477240826,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384235,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391931,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138149,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6294446784188676,
						"acc_stderr,none": 0.060553599861075065,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040305,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959685,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.01166782538830548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5678358702845797,
						"acc_stderr,none": 0.012748153864597583,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503925,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.012151164438163909,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828157,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6796823295830576,
						"acc_stderr,none": 0.012007565507943376,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084814,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750352,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6446062210456651,
						"acc_stderr,none": 0.01231724793041838,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8186109238031019,
						"acc_stderr,none": 0.037777648582528064,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8812903225806452,
						"acc_stderr,none": 0.006709412618684158,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353824,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7908745247148289,
						"acc_stderr,none": 0.025125031682933376,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.026198057744026403,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7757936507936508,
						"acc_stderr,none": 0.018595723133309875,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C2-rwkv-250_pth"
	},
	"./rwkv-x-dev/1_3-C2-rwkv-648_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6378241262683202,
						"acc_norm,none": 0.636978579481398,
						"acc_norm_stderr,none": 0.08961903210082431,
						"acc_stderr,none": 0.1075395917783219,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4834375,
						"acc_stderr,none": 0.047705775721266164,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.04385,
						"acc_stderr,none": 0.040697377299739775,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8111194029850747,
						"acc_stderr,none": 0.16465184992037962,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27637444279346207,
						"acc_norm,none": 0.27637444279346207,
						"acc_norm_stderr,none": 0.12699034167894258,
						"acc_stderr,none": 0.12699034167894258,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3056466931445346,
						"acc_norm,none": 0.3056466931445346,
						"acc_norm_stderr,none": 0.057336959788024854,
						"acc_stderr,none": 0.057336959788024854,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.40933400417412,
						"likelihood_diff_stderr,none": 0.4962928995198181,
						"pct_stereotype,none": 0.5988372093023255,
						"pct_stereotype_stderr,none": 0.07252713733661571
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12303149606299213,
						"exact_match_stderr,none": 0.007288617993812068
					},
					"glue": {
						"acc,none": 0.7428090757503573,
						"acc_stderr,none": 0.004389268188865825,
						"alias": "glue",
						"f1,none": 0.6815169910255877,
						"f1_stderr,none": 0.00019963595447268213,
						"mcc,none": 0.19378948210792704,
						"mcc_stderr,none": 0.029047826464288228
					},
					"kmmlu": {
						"acc,none": 0.28533063817499277,
						"acc_norm,none": 0.28533063817499277,
						"acc_norm_stderr,none": 0.0293703816800794,
						"acc_stderr,none": 0.0293703816800794,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.536066652049989,
						"acc_norm,none": 0.554,
						"acc_norm_stderr,none": 0.0004951583166332651,
						"acc_stderr,none": 0.04719968136553444,
						"alias": "kobest",
						"f1,none": 0.45268876755448434,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7164758393169027,
						"acc_stderr,none": 0.017011820691953353,
						"alias": "lambada",
						"perplexity,none": 3.6514505851412062,
						"perplexity_stderr,none": 0.18670037759927946
					},
					"lambada_cloze": {
						"acc,none": 0.025519115078594994,
						"acc_stderr,none": 0.002285582325747542,
						"alias": "lambada_cloze",
						"perplexity,none": 605.5214258462441,
						"perplexity_stderr,none": 147.51755981719484
					},
					"lambada_multilingual": {
						"acc,none": 0.5402678051620415,
						"acc_stderr,none": 0.08533485225340719,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.018212835843947,
						"perplexity_stderr,none": 8.182202479820372
					},
					"mmlu": {
						"acc,none": 0.39638228172624984,
						"acc_stderr,none": 0.08490541424618747,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3706695005313496,
						"acc_stderr,none": 0.08460582538198343,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.44383649822980364,
						"acc_stderr,none": 0.08449126992238441,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4436139096522587,
						"acc_stderr,none": 0.06922719357417789,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3418966064065968,
						"acc_stderr,none": 0.07457392642719728,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3877927608232789,
						"acc_norm,none": 0.35722933321011047,
						"acc_norm_stderr,none": 0.00011139501530609095,
						"acc_stderr,none": 0.08499791955465495,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4838571428571429,
						"acc_stderr,none": 0.05204997217747734,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.734860017935992,
						"acc_norm,none": 0.6410180572742012,
						"acc_norm_stderr,none": 0.010073944915983306,
						"acc_stderr,none": 0.1545591361739994,
						"alias": "pythia",
						"bits_per_byte,none": 0.6329207167167666,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550701191592826,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3049986552890807,
						"perplexity_stderr,none": 0.06436847033786772,
						"word_perplexity,none": 10.44334728436579,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.05496421099943326,
						"acc_stderr,none": 0.04134187998803396,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6779142125054075,
						"acc_stderr,none": 0.07049456813218703,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32629359117380474,
						"acc_stderr,none": 0.0013742304972474315,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.7889428962189635,
						"bleu_diff_stderr,none": 0.8805957382814702,
						"bleu_max,none": 26.912894718240686,
						"bleu_max_stderr,none": 0.7951558019921323,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219354,
						"rouge1_diff,none": -9.76692863997879,
						"rouge1_diff_stderr,none": 0.961696878023807,
						"rouge1_max,none": 52.57017392197321,
						"rouge1_max_stderr,none": 0.8615452352094476,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -11.809545537416474,
						"rouge2_diff_stderr,none": 1.1570613146672923,
						"rouge2_max,none": 36.366743420737464,
						"rouge2_max_stderr,none": 1.0111827239857785,
						"rougeL_acc,none": 0.2766217870257038,
						"rougeL_acc_stderr,none": 0.01565960575532691,
						"rougeL_diff,none": -10.058860640320813,
						"rougeL_diff_stderr,none": 0.9810757717447302,
						"rougeL_max,none": 49.643806300008606,
						"rougeL_max_stderr,none": 0.8780169244542013
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07070266932313447,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4380187416331995,
						"acc_stderr,none": 0.04869554410247458,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6284820407917695,
						"acc_stderr,none": 0.05447442472750628,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8172623061362104,
						"acc_stderr,none": 0.038827538141508404,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6378241262683202,
						"acc_norm,none": 0.636978579481398,
						"acc_norm_stderr,none": 0.08961903210082431,
						"acc_stderr,none": 0.1075395917783219,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4834375,
						"acc_stderr,none": 0.047705775721266164,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577797,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.015704987954361798,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.43833333333333335,
						"acc_stderr,none": 0.014329531211422586,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4104095563139932,
						"acc_norm,none": 0.44795221843003413,
						"acc_norm_stderr,none": 0.014532011498211667,
						"acc_stderr,none": 0.01437492219264266,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.75,
						"acc_norm,none": 0.7302188552188552,
						"acc_norm_stderr,none": 0.009107527914671064,
						"acc_stderr,none": 0.008885233166386385,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.04385,
						"acc_stderr,none": 0.040697377299739775,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0145,
						"acc_stderr,none": 0.002673658397142787,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.162,
						"acc_stderr,none": 0.008240871069127843,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0705,
						"acc_stderr,none": 0.005725492610493554,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.015,
						"acc_stderr,none": 0.0027186753387999567,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.016,
						"acc_stderr,none": 0.0028064101569415328,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.07,
						"acc_stderr,none": 0.005706687580512206,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.000999249343069499,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0325,
						"acc_stderr,none": 0.003966073608738825,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.048,
						"acc_stderr,none": 0.004781153596660219,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0021691973969631237,
						"acc_stderr,none": 0.0009692521054558653,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8111194029850747,
						"acc_stderr,none": 0.16465184992037962,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340966,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106525,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437803,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812185,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307792,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.0113589183034753,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030084,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323497,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897883,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970674,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524298,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487914,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698457,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.014721675438880227,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095526,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.013736254390651152,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108652,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689071,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787745,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117714,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801108,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315165,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408039,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248793,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996676,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345688,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.319,
						"acc_stderr,none": 0.014746404865473491,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.577,
						"acc_stderr,none": 0.015630589090476342,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333315,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.015813097547730984,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525057,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248114,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804478,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796379,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578002,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400252,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.0114199130650987,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996012,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557416,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108663,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611494,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477345,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.435,
						"acc_stderr,none": 0.0156850572527172,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849879,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498329,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992445,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296183,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.01162816469672717,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280309,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318241,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316407,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.289,
						"acc_stderr,none": 0.014341711358296193,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7018348623853211,
						"acc_stderr,none": 0.008000892584151422,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.6869845948696355,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27637444279346207,
						"acc_norm,none": 0.27637444279346207,
						"acc_norm_stderr,none": 0.12699034167894258,
						"acc_stderr,none": 0.12699034167894258,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.05763033956734372,
						"acc_stderr,none": 0.05763033956734372,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0606060606060606,
						"acc_stderr,none": 0.0606060606060606,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828226,
						"acc_stderr,none": 0.05589005688828226,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.38636363636363635,
						"acc_norm,none": 0.38636363636363635,
						"acc_norm_stderr,none": 0.07425392901036848,
						"acc_stderr,none": 0.07425392901036848,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.07275304578557182,
						"acc_stderr,none": 0.07275304578557182,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3056466931445346,
						"acc_norm,none": 0.3056466931445346,
						"acc_norm_stderr,none": 0.057336959788024854,
						"acc_stderr,none": 0.057336959788024854,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.032839063537459336,
						"acc_stderr,none": 0.032839063537459336,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3053435114503817,
						"acc_norm,none": 0.3053435114503817,
						"acc_norm_stderr,none": 0.040393149787245605,
						"acc_stderr,none": 0.040393149787245605,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.32507739938080493,
						"acc_norm,none": 0.32507739938080493,
						"acc_norm_stderr,none": 0.02610312109754256,
						"acc_stderr,none": 0.02610312109754256,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.37988826815642457,
						"acc_norm,none": 0.37988826815642457,
						"acc_norm_stderr,none": 0.0363791806643084,
						"acc_stderr,none": 0.0363791806643084,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.041175810978451015,
						"acc_stderr,none": 0.041175810978451015,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.02717645531875414,
						"acc_stderr,none": 0.02717645531875414,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.03332139944668086,
						"acc_stderr,none": 0.03332139944668086,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209195,
						"acc_stderr,none": 0.03377310252209195,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2925170068027211,
						"acc_norm,none": 0.2925170068027211,
						"acc_norm_stderr,none": 0.03764931984085173,
						"acc_stderr,none": 0.03764931984085173,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.037622409350890895,
						"acc_stderr,none": 0.037622409350890895,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.34355828220858897,
						"acc_norm,none": 0.34355828220858897,
						"acc_norm_stderr,none": 0.03731133519673893,
						"acc_stderr,none": 0.03731133519673893,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.028271399816988552,
						"acc_stderr,none": 0.028271399816988552,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.44537815126050423,
						"acc_norm,none": 0.44537815126050423,
						"acc_norm_stderr,none": 0.032284106267163895,
						"acc_stderr,none": 0.032284106267163895,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933945,
						"acc_stderr,none": 0.028187385293933945,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.038970778815104114,
						"acc_stderr,none": 0.038970778815104114,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3068181818181818,
						"acc_norm,none": 0.3068181818181818,
						"acc_norm_stderr,none": 0.03486142240553238,
						"acc_stderr,none": 0.03486142240553238,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261343,
						"acc_stderr,none": 0.04319782230261343,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04040610178208841,
						"acc_stderr,none": 0.04040610178208841,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37209302325581395,
						"acc_norm,none": 0.37209302325581395,
						"acc_norm_stderr,none": 0.03696369368553605,
						"acc_stderr,none": 0.03696369368553605,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.021989272196105043,
						"acc_stderr,none": 0.021989272196105043,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.40654205607476634,
						"acc_norm,none": 0.40654205607476634,
						"acc_norm_stderr,none": 0.033655644506134855,
						"acc_stderr,none": 0.033655644506134855,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.041832732587876245,
						"acc_stderr,none": 0.041832732587876245,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3619047619047619,
						"acc_norm,none": 0.3619047619047619,
						"acc_norm_stderr,none": 0.033240439515935034,
						"acc_stderr,none": 0.033240439515935034,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.034768900963930385,
						"acc_stderr,none": 0.034768900963930385,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.328042328042328,
						"acc_norm,none": 0.328042328042328,
						"acc_norm_stderr,none": 0.0342418307585366,
						"acc_stderr,none": 0.0342418307585366,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843724,
						"acc_stderr,none": 0.034645078898843724,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33620689655172414,
						"acc_norm,none": 0.33620689655172414,
						"acc_norm_stderr,none": 0.031082338581586128,
						"acc_stderr,none": 0.031082338581586128,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3505747126436782,
						"acc_norm,none": 0.3505747126436782,
						"acc_norm_stderr,none": 0.03627703962615275,
						"acc_stderr,none": 0.03627703962615275,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3407079646017699,
						"acc_norm,none": 0.3407079646017699,
						"acc_norm_stderr,none": 0.03159648696778299,
						"acc_stderr,none": 0.03159648696778299,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3393939393939394,
						"acc_norm,none": 0.3393939393939394,
						"acc_norm_stderr,none": 0.036974422050315967,
						"acc_stderr,none": 0.036974422050315967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.035795265164562245,
						"acc_stderr,none": 0.035795265164562245,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32919254658385094,
						"acc_norm,none": 0.32919254658385094,
						"acc_norm_stderr,none": 0.03715043857896316,
						"acc_stderr,none": 0.03715043857896316,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.19378948210792704,
						"mcc_stderr,none": 0.029047826464288228
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.40933400417412,
						"likelihood_diff_stderr,none": 0.4962928995198181,
						"pct_stereotype,none": 0.5988372093023255,
						"pct_stereotype_stderr,none": 0.07252713733661571
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.621943947525343,
						"likelihood_diff_stderr,none": 0.08583781351133352,
						"pct_stereotype,none": 0.6302921884317233,
						"pct_stereotype_stderr,none": 0.011791342887242921
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.049450549450549,
						"likelihood_diff_stderr,none": 0.386331043953602,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.049117041148312786
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.454545454545454,
						"likelihood_diff_stderr,none": 1.7935548062368227,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.842307692307692,
						"likelihood_diff_stderr,none": 0.6137097851947507,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.64296875,
						"likelihood_diff_stderr,none": 0.16132595018487114,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.027429019252949587
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5931712962962963,
						"likelihood_diff_stderr,none": 0.23740973862414305,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252335
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8003472222222223,
						"likelihood_diff_stderr,none": 0.31765755407157137,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.385334645669291,
						"likelihood_diff_stderr,none": 0.14819459971168905,
						"pct_stereotype,none": 0.5413385826771654,
						"pct_stereotype_stderr,none": 0.022129755490549064
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5833333333333335,
						"likelihood_diff_stderr,none": 0.3434456095292042,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.042343213610845386
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.875,
						"likelihood_diff_stderr,none": 0.43042813443754,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1947368421052635,
						"likelihood_diff_stderr,none": 0.2450523922204115,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.0341048643533449
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.199351520572451,
						"likelihood_diff_stderr,none": 0.07359515817661301,
						"pct_stereotype,none": 0.569469290399523,
						"pct_stereotype_stderr,none": 0.012094842430543592
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.161111111111111,
						"likelihood_diff_stderr,none": 0.30577338734037024,
						"pct_stereotype,none": 0.5777777777777777,
						"pct_stereotype_stderr,none": 0.05235473399540656
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.6346153846153846,
						"likelihood_diff_stderr,none": 0.9378163503071125,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.731060606060606,
						"likelihood_diff_stderr,none": 0.3859912392442704,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.647196261682243,
						"likelihood_diff_stderr,none": 0.12572371207530317,
						"pct_stereotype,none": 0.573208722741433,
						"pct_stereotype_stderr,none": 0.02764962041526109
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4456521739130435,
						"likelihood_diff_stderr,none": 0.19989508891992241,
						"pct_stereotype,none": 0.40711462450592883,
						"pct_stereotype_stderr,none": 0.03094877404932307
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5694444444444446,
						"likelihood_diff_stderr,none": 0.43369607825951967,
						"pct_stereotype,none": 0.7083333333333334,
						"pct_stereotype_stderr,none": 0.05394274771736146
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.7907608695652173,
						"likelihood_diff_stderr,none": 0.13625099203255484,
						"pct_stereotype,none": 0.45652173913043476,
						"pct_stereotype_stderr,none": 0.0232495995623097
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3163043478260867,
						"likelihood_diff_stderr,none": 0.2868873506650502,
						"pct_stereotype,none": 0.7217391304347827,
						"pct_stereotype_stderr,none": 0.04197239673902095
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.543956043956044,
						"likelihood_diff_stderr,none": 0.3072583827592758,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.04441155916843277
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8864795918367347,
						"likelihood_diff_stderr,none": 0.24790655000057166,
						"pct_stereotype,none": 0.7346938775510204,
						"pct_stereotype_stderr,none": 0.031616190581285016
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12303149606299213,
						"exact_match_stderr,none": 0.007288617993812068
					},
					"glue": {
						"acc,none": 0.7428090757503573,
						"acc_stderr,none": 0.004389268188865825,
						"alias": "glue",
						"f1,none": 0.6815169910255877,
						"f1_stderr,none": 0.00019963595447268213,
						"mcc,none": 0.19378948210792704,
						"mcc_stderr,none": 0.029047826464288228
					},
					"hellaswag": {
						"acc,none": 0.5374427404899422,
						"acc_norm,none": 0.7252539334793866,
						"acc_norm_stderr,none": 0.004454739415705047,
						"acc_stderr,none": 0.004975770805464642,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.28533063817499277,
						"acc_norm,none": 0.28533063817499277,
						"acc_norm_stderr,none": 0.0293703816800794,
						"acc_stderr,none": 0.0293703816800794,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259583,
						"acc_stderr,none": 0.014111099288259583,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.01429714686251791,
						"acc_stderr,none": 0.01429714686251791,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.017613084291727022,
						"acc_stderr,none": 0.017613084291727022,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477343,
						"acc_stderr,none": 0.014385511563477343,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.345,
						"acc_norm,none": 0.345,
						"acc_norm_stderr,none": 0.015039986742055235,
						"acc_stderr,none": 0.015039986742055235,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206488,
						"acc_stderr,none": 0.014619600977206488,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.03232801420614267,
						"acc_stderr,none": 0.03232801420614267,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206493,
						"acc_stderr,none": 0.014619600977206493,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2230769230769231,
						"acc_norm,none": 0.2230769230769231,
						"acc_norm_stderr,none": 0.03665400868201044,
						"acc_stderr,none": 0.03665400868201044,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481744,
						"acc_stderr,none": 0.014251810906481744,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738856,
						"acc_stderr,none": 0.014806864733738856,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314144,
						"acc_stderr,none": 0.013644675781314144,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.01419015011761203,
						"acc_stderr,none": 0.01419015011761203,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809963,
						"acc_stderr,none": 0.014312087053809963,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.01422115470843494,
						"acc_stderr,none": 0.01422115470843494,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291345,
						"acc_stderr,none": 0.014236526215291345,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.045604802157206845,
						"acc_stderr,none": 0.045604802157206845,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477347,
						"acc_stderr,none": 0.014385511563477347,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.332,
						"acc_norm,none": 0.332,
						"acc_norm_stderr,none": 0.014899597242811492,
						"acc_stderr,none": 0.014899597242811492,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481744,
						"acc_stderr,none": 0.014251810906481744,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967227,
						"acc_stderr,none": 0.013473586661967227,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.01451239503354315,
						"acc_stderr,none": 0.01451239503354315,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920849,
						"acc_stderr,none": 0.013512312258920849,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.0180323860015301,
						"acc_stderr,none": 0.0180323860015301,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.01423652621529135,
						"acc_stderr,none": 0.01423652621529135,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01420569610409151,
						"acc_stderr,none": 0.01420569610409151,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445504,
						"acc_stderr,none": 0.014428554438445504,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934656,
						"acc_stderr,none": 0.014770821817934656,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.02481518457232592,
						"acc_stderr,none": 0.02481518457232592,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102135,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.01417451646148526,
						"acc_stderr,none": 0.01417451646148526,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.01428212095520048,
						"acc_stderr,none": 0.01428212095520048,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.031828687164775826,
						"acc_stderr,none": 0.031828687164775826,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809946,
						"acc_stderr,none": 0.013963164754809946,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434942,
						"acc_stderr,none": 0.014221154708434942,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.358,
						"acc_norm,none": 0.358,
						"acc_norm_stderr,none": 0.01516792886540756,
						"acc_stderr,none": 0.01516792886540756,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.536066652049989,
						"acc_norm,none": 0.554,
						"acc_norm_stderr,none": 0.0004951583166332651,
						"acc_stderr,none": 0.04719968136553444,
						"alias": "kobest",
						"f1,none": 0.45268876755448434,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.542022792022792,
						"acc_stderr,none": 0.013301538369184794,
						"alias": " - kobest_boolq",
						"f1,none": 0.4324069946627753,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.015222868840522024,
						"alias": " - kobest_copa",
						"f1,none": 0.6349354718358734,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.432,
						"acc_norm,none": 0.554,
						"acc_norm_stderr,none": 0.0222521530785959,
						"acc_stderr,none": 0.022175109265613155,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42674588994259266,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5465994962216625,
						"acc_stderr,none": 0.025016584749017332,
						"alias": " - kobest_sentineg",
						"f1,none": 0.451926616762793,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604057,
						"alias": " - kobest_wic",
						"f1,none": 0.34118314654560933,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7164758393169027,
						"acc_stderr,none": 0.017011820691953353,
						"alias": "lambada",
						"perplexity,none": 3.6514505851412062,
						"perplexity_stderr,none": 0.18670037759927946
					},
					"lambada_cloze": {
						"acc,none": 0.025519115078594994,
						"acc_stderr,none": 0.002285582325747542,
						"alias": "lambada_cloze",
						"perplexity,none": 605.5214258462441,
						"perplexity_stderr,none": 147.51755981719484
					},
					"lambada_multilingual": {
						"acc,none": 0.5402678051620415,
						"acc_stderr,none": 0.08533485225340719,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.018212835843947,
						"perplexity_stderr,none": 8.182202479820372
					},
					"lambada_openai": {
						"acc,none": 0.7469435280419173,
						"acc_stderr,none": 0.0060570991335995505,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3049986552890807,
						"perplexity_stderr,none": 0.06436847033786772
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.02678051620415292,
						"acc_stderr,none": 0.0022491941343246082,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 313.7044487298791,
						"perplexity_stderr,none": 9.932086942082456
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4302348146710654,
						"acc_stderr,none": 0.006897835015074963,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.168602283161235,
						"perplexity_stderr,none": 1.8880831212865108
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7483019600232874,
						"acc_stderr,none": 0.00604631029126968,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.307987517900552,
						"perplexity_stderr,none": 0.06447249383171981
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4541044052008539,
						"acc_stderr,none": 0.006936569231082088,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.14379105337423,
						"perplexity_stderr,none": 1.4298846808687438
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5513293227246264,
						"acc_stderr,none": 0.0069291739196654855,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.522165086528716,
						"perplexity_stderr,none": 0.8026704485862675
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5173685231903745,
						"acc_stderr,none": 0.006961773596960157,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.948518238254994,
						"perplexity_stderr,none": 1.1606936553148284
					},
					"lambada_standard": {
						"acc,none": 0.6848437803221424,
						"acc_stderr,none": 0.006472480817588202,
						"alias": " - lambada_standard",
						"perplexity,none": 3.994658535097017,
						"perplexity_stderr,none": 0.08156993028154927
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.024257713953037066,
						"acc_stderr,none": 0.002143406638058574,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 897.3384029626093,
						"perplexity_stderr,none": 29.01034119934538
					},
					"logiqa": {
						"acc,none": 0.25960061443932414,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971925,
						"acc_stderr,none": 0.017196070008180027,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2582697201017812,
						"acc_norm,none": 0.2881679389312977,
						"acc_norm_stderr,none": 0.011426770634965253,
						"acc_stderr,none": 0.011042608058378034,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2489112227805695,
						"acc_norm,none": 0.25159128978224454,
						"acc_norm_stderr,none": 0.007943608064206829,
						"acc_stderr,none": 0.007915319798861353,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3979029866553696,
						"acc_stderr,none": 0.005037474226282458,
						"alias": "mc_taco",
						"f1,none": 0.5220680958385876,
						"f1_stderr,none": 0.005583134755290975
					},
					"medmcqa": {
						"acc,none": 0.3511833612240019,
						"acc_norm,none": 0.3511833612240019,
						"acc_norm_stderr,none": 0.007381352414568343,
						"acc_stderr,none": 0.007381352414568343,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.01352516233552755,
						"acc_stderr,none": 0.01352516233552755,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.39638228172624984,
						"acc_stderr,none": 0.08490541424618747,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.39473684210526316,
						"acc_stderr,none": 0.039777499346220734,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.029773082713319875,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.04122728707651283,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952365,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958548,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376556,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3829787234042553,
						"acc_stderr,none": 0.03177821250236922,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4413793103448276,
						"acc_stderr,none": 0.04137931034482758,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29894179894179895,
						"acc_stderr,none": 0.023577604791655788,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.042163702135578345,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.432258064516129,
						"acc_stderr,none": 0.028181739720019406,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2955665024630542,
						"acc_stderr,none": 0.032104944337514575,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0498887651569859,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5575757575757576,
						"acc_stderr,none": 0.038783721137112745,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4696969696969697,
						"acc_stderr,none": 0.03555804051763928,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5647668393782384,
						"acc_stderr,none": 0.035780381650085874,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.37435897435897436,
						"acc_stderr,none": 0.024537591572830506,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.027309140588230186,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3739495798319328,
						"acc_stderr,none": 0.03142946637883708,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5211009174311927,
						"acc_stderr,none": 0.02141822475426464,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791016,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.47549019607843135,
						"acc_stderr,none": 0.035050931943487976,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5485232067510548,
						"acc_stderr,none": 0.032393600173974704,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5190839694656488,
						"acc_stderr,none": 0.04382094705550988,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3706695005313496,
						"acc_stderr,none": 0.08460582538198343,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4214876033057851,
						"acc_stderr,none": 0.04507732278775094,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.03731133519673892,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5436893203883495,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.03193705726200293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.558109833971903,
						"acc_stderr,none": 0.01775880053421442,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3670520231213873,
						"acc_stderr,none": 0.025950054337654085,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21899441340782122,
						"acc_stderr,none": 0.013831676687303208,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.028358956313423545,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.44383649822980364,
						"acc_stderr,none": 0.08449126992238441,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4662379421221865,
						"acc_stderr,none": 0.028333277109562807,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.0277012284685426,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343954,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.32985658409387225,
						"acc_stderr,none": 0.012008129938540458,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4485294117647059,
						"acc_stderr,none": 0.0302114796091216,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4084967320261438,
						"acc_stderr,none": 0.01988622103750187,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.41818181818181815,
						"acc_stderr,none": 0.04724577405731572,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.34285714285714286,
						"acc_stderr,none": 0.03038726291954773,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4436139096522587,
						"acc_stderr,none": 0.06922719357417789,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5572139303482587,
						"acc_stderr,none": 0.03512310964123935,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3418966064065968,
						"acc_stderr,none": 0.07457392642719728,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956913,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6374269005847953,
						"acc_stderr,none": 0.036871306155620606,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7811512990320937,
						"acc_stderr,none": 0.004173654928336798,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7789869812855981,
						"acc_stderr,none": 0.004184804557506983,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": "mrpc",
						"f1,none": 0.8315301391035549,
						"f1_stderr,none": 0.01594091962048564
					},
					"multimedqa": {
						"acc,none": 0.3877927608232789,
						"acc_norm,none": 0.35722933321011047,
						"acc_norm_stderr,none": 0.00011139501530609095,
						"acc_stderr,none": 0.08499791955465495,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5653877887788779,
						"acc_stderr,none": 0.007120125761242575,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7052294974536982,
						"mrr_stderr,none": 0.010373450408762848,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41196388261851014,
						"r@2_stderr,none": 0.016544739619609432
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6543453740414324,
						"mrr_stderr,none": 0.010426826971521148,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4762979683972912,
						"r@2_stderr,none": 0.016788421275515525
					},
					"openbookqa": {
						"acc,none": 0.288,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.022017482578127676,
						"acc_stderr,none": 0.020271503835075227,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.011129305041886323,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3785,
						"acc_stderr,none": 0.010847935926107404,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4195,
						"acc_stderr,none": 0.011037245371590673,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.537,
						"acc_stderr,none": 0.011152474561478175,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.01116092102288328,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884876,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4838571428571429,
						"acc_stderr,none": 0.05204997217747734,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7758433079434167,
						"acc_norm,none": 0.7845484221980413,
						"acc_norm_stderr,none": 0.009592463115658116,
						"acc_stderr,none": 0.00972989795641004,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23350768573868488,
						"acc_norm,none": 0.28351836037574724,
						"acc_norm_stderr,none": 0.0032928070163216654,
						"acc_stderr,none": 0.0030908479661250865,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.02031317923174519,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.734860017935992,
						"acc_norm,none": 0.6410180572742012,
						"acc_norm_stderr,none": 0.010073944915983306,
						"acc_stderr,none": 0.1545591361739994,
						"alias": "pythia",
						"bits_per_byte,none": 0.6329207167167666,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550701191592826,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3049986552890807,
						"perplexity_stderr,none": 0.06436847033786772,
						"word_perplexity,none": 10.44334728436579,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.05496421099943326,
						"acc_stderr,none": 0.04134187998803396,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280212,
						"acc_stderr,none": 0.04552192400253556,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.45625,
						"acc_norm_stderr,none": 0.0395004925930594,
						"acc_stderr,none": 0.037826149818120415,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.39436619718309857,
						"acc_norm_stderr,none": 0.02905103950765015,
						"acc_stderr,none": 0.0290954929170649,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7555033391046253,
						"acc_stderr,none": 0.002137510026507625,
						"alias": "qqp",
						"f1,none": 0.6802523047064531,
						"f1_stderr,none": 0.0030402013396927208
					},
					"race": {
						"acc,none": 0.33779904306220093,
						"acc_stderr,none": 0.014637734314782855,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2646,
						"em_stderr,none": 0.004411420413313981,
						"f1,none": 0.27403523832857607,
						"f1_stderr,none": 0.004423306651959145
					},
					"rte": {
						"acc,none": 0.6931407942238267,
						"acc_stderr,none": 0.02776040303805896,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.959,
						"acc_norm,none": 0.952,
						"acc_norm_stderr,none": 0.006763264133666646,
						"acc_stderr,none": 0.006273624021118772,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.02785041039263069,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9139908256880734,
						"acc_stderr,none": 0.009500232412777832,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.575777266819954,
						"acc_norm,none": 0.7694191742477257,
						"acc_norm_stderr,none": 0.002977994008608363,
						"acc_stderr,none": 0.003494257837110452,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6779142125054075,
						"acc_stderr,none": 0.07049456813218703,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5994591346153846,
						"acc_stderr,none": 0.004904250799377168,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8634843417452113,
						"acc_stderr,none": 0.0034565903366070684,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5751960784313725,
						"acc_stderr,none": 0.004894670046194924,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32629359117380474,
						"acc_stderr,none": 0.0013742304972474315,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.7889428962189635,
						"bleu_diff_stderr,none": 0.8805957382814702,
						"bleu_max,none": 26.912894718240686,
						"bleu_max_stderr,none": 0.7951558019921323,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219354,
						"rouge1_diff,none": -9.76692863997879,
						"rouge1_diff_stderr,none": 0.961696878023807,
						"rouge1_max,none": 52.57017392197321,
						"rouge1_max_stderr,none": 0.8615452352094476,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -11.809545537416474,
						"rouge2_diff_stderr,none": 1.1570613146672923,
						"rouge2_max,none": 36.366743420737464,
						"rouge2_max_stderr,none": 1.0111827239857785,
						"rougeL_acc,none": 0.2766217870257038,
						"rougeL_acc_stderr,none": 0.01565960575532691,
						"rougeL_diff,none": -10.058860640320813,
						"rougeL_diff_stderr,none": 0.9810757717447302,
						"rougeL_max,none": 49.643806300008606,
						"rougeL_max_stderr,none": 0.8780169244542013
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.7889428962189635,
						"bleu_diff_stderr,none": 0.8805957382814702,
						"bleu_max,none": 26.912894718240686,
						"bleu_max_stderr,none": 0.7951558019921323,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219354,
						"rouge1_diff,none": -9.76692863997879,
						"rouge1_diff_stderr,none": 0.961696878023807,
						"rouge1_max,none": 52.57017392197321,
						"rouge1_max_stderr,none": 0.8615452352094476,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -11.809545537416474,
						"rouge2_diff_stderr,none": 1.1570613146672923,
						"rouge2_max,none": 36.366743420737464,
						"rouge2_max_stderr,none": 1.0111827239857785,
						"rougeL_acc,none": 0.2766217870257038,
						"rougeL_acc_stderr,none": 0.01565960575532691,
						"rougeL_diff,none": -10.058860640320813,
						"rougeL_diff_stderr,none": 0.9810757717447302,
						"rougeL_max,none": 49.643806300008606,
						"rougeL_max_stderr,none": 0.8780169244542013
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2582619339045288,
						"acc_stderr,none": 0.015321821688476187,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3943252484430807,
						"acc_stderr,none": 0.014085372312709014,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.12303149606299213,
						"exact_match_stderr,none": 0.007288617993812068
					},
					"wic": {
						"acc,none": 0.5830721003134797,
						"acc_stderr,none": 0.019535381343949925,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6329207167167666,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550701191592826,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.44334728436579,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7111286503551697,
						"acc_stderr,none": 0.01273824127101845,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899505,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4326923076923077,
						"acc_stderr,none": 0.04881803687006195,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070850993,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07070266932313447,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.021912377885779967,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.02043534209189613,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143025,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.02148775108972053,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070893,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4380187416331995,
						"acc_stderr,none": 0.04869554410247458,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463623,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4751004016064257,
						"acc_stderr,none": 0.01000963798302251,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071305,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.009783558109997086,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5240963855421686,
						"acc_stderr,none": 0.010010427753210673,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5012048192771085,
						"acc_stderr,none": 0.010022043771315572,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4991967871485944,
						"acc_stderr,none": 0.010022059935722397,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41686746987951806,
						"acc_stderr,none": 0.009882576606533239,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4831325301204819,
						"acc_stderr,none": 0.010016368453021547,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.4036144578313253,
						"acc_stderr,none": 0.009834096424955401,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164682,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45863453815261046,
						"acc_stderr,none": 0.009987716412406566,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.009878474341822933,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840171,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3534136546184739,
						"acc_stderr,none": 0.009581698005070976,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6284820407917695,
						"acc_stderr,none": 0.05447442472750628,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.012661578894368947,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959687,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71343481138319,
						"acc_stderr,none": 0.011635910995502253,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5632031767041694,
						"acc_stderr,none": 0.012763912250173636,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570942,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.012168840221678036,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.0127956881673853,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6823295830575777,
						"acc_stderr,none": 0.011981108837175403,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.012799246690109753,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607806,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6419589675711449,
						"acc_stderr,none": 0.012337624883487578,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8172623061362104,
						"acc_stderr,none": 0.038827538141508404,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8821505376344086,
						"acc_stderr,none": 0.006688320753621778,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7393117831074035,
						"acc_stderr,none": 0.014183772611573471,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7908745247148289,
						"acc_stderr,none": 0.025125031682933376,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7777777777777778,
						"acc_stderr,none": 0.018536917448559433,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_3-C2-rwkv-648_pth"
	},
	"./rwkv-x-dev/1_3-C5-rwkv-270_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.640924464487035,
						"acc_norm,none": 0.6347237880496054,
						"acc_norm_stderr,none": 0.08777391968765197,
						"acc_stderr,none": 0.10818974412056676,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4903125,
						"acc_stderr,none": 0.045600531197118724,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.1621398952228472,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8138955223880597,
						"acc_stderr,none": 0.16168290571436741,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.274888558692422,
						"acc_norm,none": 0.274888558692422,
						"acc_norm_stderr,none": 0.12374381777879588,
						"acc_stderr,none": 0.12374381777879588,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.30253842168882744,
						"acc_norm,none": 0.30253842168882744,
						"acc_norm_stderr,none": 0.05551994400479606,
						"acc_stderr,none": 0.05551994400479606,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.401032349433512,
						"likelihood_diff_stderr,none": 0.49369241162259725,
						"pct_stereotype,none": 0.5928741800834824,
						"pct_stereotype_stderr,none": 0.0735708212675542
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15846456692913385,
						"exact_match_stderr,none": 0.008103027740956018
					},
					"glue": {
						"acc,none": 0.7132526559857594,
						"acc_stderr,none": 0.042112227934448423,
						"alias": "glue",
						"f1,none": 0.6326466704791135,
						"f1_stderr,none": 0.00033722797032780433,
						"mcc,none": 0.15650538724366975,
						"mcc_stderr,none": 0.0007411789770813937
					},
					"kmmlu": {
						"acc,none": 0.2872653768408894,
						"acc_norm,none": 0.2872653768408894,
						"acc_norm_stderr,none": 0.028665763438712376,
						"acc_stderr,none": 0.028665763438712376,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5358474018855515,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.0004959919839679342,
						"acc_stderr,none": 0.04894891516216254,
						"alias": "kobest",
						"f1,none": 0.4640272504035749,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7134678827867261,
						"acc_stderr,none": 0.01751591427398706,
						"alias": "lambada",
						"perplexity,none": 3.6803356500824975,
						"perplexity_stderr,none": 0.19091419550622318
					},
					"lambada_cloze": {
						"acc,none": 0.03289346012031826,
						"acc_stderr,none": 0.002541377441051892,
						"alias": "lambada_cloze",
						"perplexity,none": 522.7010411569611,
						"perplexity_stderr,none": 127.61113451422388
					},
					"lambada_multilingual": {
						"acc,none": 0.5425189210168834,
						"acc_stderr,none": 0.0841264148047786,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.830489716125598,
						"perplexity_stderr,none": 8.132453127807624
					},
					"mmlu": {
						"acc,none": 0.39894601908560035,
						"acc_stderr,none": 0.08243052855347992,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.37130712008501593,
						"acc_stderr,none": 0.08058779150477699,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.45123913743160604,
						"acc_stderr,none": 0.08088272662485922,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4406889827754306,
						"acc_stderr,none": 0.06874869757119362,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07394260175893973,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.38381831085876505,
						"acc_norm,none": 0.35013841571764037,
						"acc_norm_stderr,none": 0.000107888339920679,
						"acc_stderr,none": 0.09205764299170353,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4825714285714286,
						"acc_stderr,none": 0.05011655600534673,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7371012082692274,
						"acc_norm,none": 0.6389293549400216,
						"acc_norm_stderr,none": 0.00980933686646588,
						"acc_stderr,none": 0.1523998964694732,
						"alias": "pythia",
						"bits_per_byte,none": 0.6328346375272971,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5506086709174187,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3279099980503535,
						"perplexity_stderr,none": 0.06476419656839366,
						"word_perplexity,none": 10.44001577641618,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.38652482269503546,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.057945047680791534,
						"acc_stderr,none": 0.044400168586075706,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6736215100994976,
						"acc_stderr,none": 0.08447157646683724,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.32746936852663516,
						"acc_stderr,none": 0.0014562895588545325,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323013,
						"bleu_diff,none": -8.214221247964888,
						"bleu_diff_stderr,none": 0.8658822229968159,
						"bleu_max,none": 27.310848423515786,
						"bleu_max_stderr,none": 0.8092416776606851,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -10.614300262439293,
						"rouge1_diff_stderr,none": 0.9151281001062015,
						"rouge1_max,none": 52.84718552395938,
						"rouge1_max_stderr,none": 0.8575997575020479,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.53777492354921,
						"rouge2_diff_stderr,none": 1.1187754496884528,
						"rouge2_max,none": 36.68614165650879,
						"rouge2_max_stderr,none": 1.0198599269927322,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.015723139524608756,
						"rougeL_diff,none": -10.894530995061702,
						"rougeL_diff_stderr,none": 0.9347372413501509,
						"rougeL_max,none": 49.825789813750234,
						"rougeL_max_stderr,none": 0.8774269078697249
					},
					"xcopa": {
						"acc,none": 0.6214545454545454,
						"acc_stderr,none": 0.07023171601514061,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44061579651941096,
						"acc_stderr,none": 0.049870647580300576,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.628722700198544,
						"acc_stderr,none": 0.060089979068040984,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8172623061362104,
						"acc_stderr,none": 0.039142099484241494,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.640924464487035,
						"acc_norm,none": 0.6347237880496054,
						"acc_norm_stderr,none": 0.08777391968765197,
						"acc_stderr,none": 0.10818974412056676,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4903125,
						"acc_stderr,none": 0.045600531197118724,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.583,
						"acc_stderr,none": 0.015599819048769618,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.015746235865880677,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.014352148043453762,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4121160409556314,
						"acc_norm,none": 0.4496587030716723,
						"acc_norm_stderr,none": 0.01453714444428474,
						"acc_stderr,none": 0.014383915302225398,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7537878787878788,
						"acc_norm,none": 0.726010101010101,
						"acc_norm_stderr,none": 0.009151805901544019,
						"acc_stderr,none": 0.008839902656771878,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.1621398952228472,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.01092893960365916,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.011056609982818336,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.011175886999478619,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9975,
						"acc_stderr,none": 0.00111691483532754,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.3515,
						"acc_stderr,none": 0.010678524731685646,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.6985,
						"acc_stderr,none": 0.010264090353040862,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.4445,
						"acc_stderr,none": 0.0111140287842845,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.5685,
						"acc_stderr,none": 0.011077690761900849,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.3675,
						"acc_stderr,none": 0.01078332114923322,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.267,
						"acc_stderr,none": 0.00989466786840821,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0013015184381778742,
						"acc_stderr,none": 0.0007511058074590368,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8138955223880597,
						"acc_stderr,none": 0.16168290571436741,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525061,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611458,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812184,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523732,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307794,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.01567232023733621,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298689007,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499357,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611494,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323495,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140911,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697068,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024952,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118585,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727065,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524317,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.014526080235459541,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.679,
						"acc_stderr,none": 0.014770821817934642,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937594,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704164,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397335,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611463,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.457,
						"acc_stderr,none": 0.015760691590136384,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783236,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386702,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.01562059547530132,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.014236526215291343,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796387,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.01060525678479657,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697584,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866439,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008211,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724453,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456729,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.015589035185604637,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.01578049505003016,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138757004,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.015790799515836763,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525073,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661775,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240653,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578002,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400252,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423524,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.015782683329937618,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.01024421514533666,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801098,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.015701131345400778,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504418,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.01556091713692166,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235239,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.739,
						"acc_stderr,none": 0.013895037677965127,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160326,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792946,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487912,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275291,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410048,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.352,
						"acc_stderr,none": 0.01511040450564867,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.291,
						"acc_stderr,none": 0.01437099598237795,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7036697247706422,
						"acc_stderr,none": 0.007986656109200201,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.711183235164483,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.274888558692422,
						"acc_norm,none": 0.274888558692422,
						"acc_norm_stderr,none": 0.12374381777879588,
						"acc_stderr,none": 0.12374381777879588,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206316,
						"acc_stderr,none": 0.062069005411206316,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.06242676343682882,
						"acc_stderr,none": 0.06242676343682882,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.05763033956734372,
						"acc_stderr,none": 0.05763033956734372,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12909944487358055,
						"acc_stderr,none": 0.12909944487358055,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.38636363636363635,
						"acc_norm,none": 0.38636363636363635,
						"acc_norm_stderr,none": 0.07425392901036847,
						"acc_stderr,none": 0.07425392901036847,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.30253842168882744,
						"acc_norm,none": 0.30253842168882744,
						"acc_norm_stderr,none": 0.05551994400479606,
						"acc_stderr,none": 0.05551994400479606,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.032606982441813086,
						"acc_stderr,none": 0.032606982441813086,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.31297709923664124,
						"acc_norm,none": 0.31297709923664124,
						"acc_norm_stderr,none": 0.04066962905677698,
						"acc_stderr,none": 0.04066962905677698,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449115,
						"acc_stderr,none": 0.04648144634449115,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.02590393636568493,
						"acc_stderr,none": 0.02590393636568493,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.03149328104507956,
						"acc_stderr,none": 0.03149328104507956,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.36312849162011174,
						"acc_norm,none": 0.36312849162011174,
						"acc_norm_stderr,none": 0.03604508648229061,
						"acc_stderr,none": 0.03604508648229061,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.027652153144159267,
						"acc_stderr,none": 0.027652153144159267,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189596,
						"acc_stderr,none": 0.04794743635189596,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.0423247353205504,
						"acc_stderr,none": 0.0423247353205504,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.02717645531875414,
						"acc_stderr,none": 0.02717645531875414,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.0332057461294543,
						"acc_stderr,none": 0.0332057461294543,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.037622409350890895,
						"acc_stderr,none": 0.037622409350890895,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3496932515337423,
						"acc_norm,none": 0.3496932515337423,
						"acc_norm_stderr,none": 0.03746668325470021,
						"acc_stderr,none": 0.03746668325470021,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29292929292929293,
						"acc_norm,none": 0.29292929292929293,
						"acc_norm_stderr,none": 0.032424979581788166,
						"acc_stderr,none": 0.032424979581788166,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.032145368597886394,
						"acc_stderr,none": 0.032145368597886394,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.038850042458002554,
						"acc_stderr,none": 0.038850042458002554,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3087248322147651,
						"acc_norm,none": 0.3087248322147651,
						"acc_norm_stderr,none": 0.037973480272130815,
						"acc_stderr,none": 0.037973480272130815,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3474576271186441,
						"acc_norm,none": 0.3474576271186441,
						"acc_norm_stderr,none": 0.04402124821792678,
						"acc_stderr,none": 0.04402124821792678,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566341,
						"acc_stderr,none": 0.03273897454566341,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.03644669348694787,
						"acc_stderr,none": 0.03644669348694787,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.02205025435599508,
						"acc_stderr,none": 0.02205025435599508,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.40654205607476634,
						"acc_norm,none": 0.40654205607476634,
						"acc_norm_stderr,none": 0.033655644506134855,
						"acc_stderr,none": 0.033655644506134855,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3476190476190476,
						"acc_norm,none": 0.3476190476190476,
						"acc_norm_stderr,none": 0.03294043089165083,
						"acc_stderr,none": 0.03294043089165083,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.328042328042328,
						"acc_norm,none": 0.328042328042328,
						"acc_norm_stderr,none": 0.0342418307585366,
						"acc_stderr,none": 0.0342418307585366,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.03806142687309994,
						"acc_stderr,none": 0.03806142687309994,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.03382281937517294,
						"acc_stderr,none": 0.03382281937517294,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26861702127659576,
						"acc_norm,none": 0.26861702127659576,
						"acc_norm_stderr,none": 0.022888827968077056,
						"acc_stderr,none": 0.022888827968077056,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0312732353098133,
						"acc_stderr,none": 0.0312732353098133,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.33185840707964603,
						"acc_norm,none": 0.33185840707964603,
						"acc_norm_stderr,none": 0.03139203046282125,
						"acc_stderr,none": 0.03139203046282125,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.03646204963253812,
						"acc_stderr,none": 0.03646204963253812,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.03616379286462019,
						"acc_stderr,none": 0.03616379286462019,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.15650538724366975,
						"mcc_stderr,none": 0.02722460242283427
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.401032349433512,
						"likelihood_diff_stderr,none": 0.49369241162259725,
						"pct_stereotype,none": 0.5928741800834824,
						"pct_stereotype_stderr,none": 0.0735708212675542
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.607334525939177,
						"likelihood_diff_stderr,none": 0.0854227572676783,
						"pct_stereotype,none": 0.6267143709004174,
						"pct_stereotype_stderr,none": 0.01181458455631237
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.082417582417582,
						"likelihood_diff_stderr,none": 0.3820272571018019,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831278
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.204545454545454,
						"likelihood_diff_stderr,none": 1.8162205330833003,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.815384615384615,
						"likelihood_diff_stderr,none": 0.6169743588411561,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.632421875,
						"likelihood_diff_stderr,none": 0.16048524936267003,
						"pct_stereotype,none": 0.615625,
						"pct_stereotype_stderr,none": 0.027235813331371494
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.603009259259259,
						"likelihood_diff_stderr,none": 0.22937748320507861,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252335
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8715277777777777,
						"likelihood_diff_stderr,none": 0.31705379473020917,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.34867125984252,
						"likelihood_diff_stderr,none": 0.14727062355059967,
						"pct_stereotype,none": 0.5295275590551181,
						"pct_stereotype_stderr,none": 0.022167024359332235
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.550675675675676,
						"likelihood_diff_stderr,none": 0.34619205852547164,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.04277662524881439
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.823924731182796,
						"likelihood_diff_stderr,none": 0.43743210819955614,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.032297000033640014
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.184210526315789,
						"likelihood_diff_stderr,none": 0.24653453761990077,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.0341048643533449
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.195736434108527,
						"likelihood_diff_stderr,none": 0.07368733364073403,
						"pct_stereotype,none": 0.5587358378056052,
						"pct_stereotype_stderr,none": 0.012128737185968451
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.102777777777778,
						"likelihood_diff_stderr,none": 0.3016779552910003,
						"pct_stereotype,none": 0.5444444444444444,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.4615384615384617,
						"likelihood_diff_stderr,none": 0.9106061985925572,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.757575757575758,
						"likelihood_diff_stderr,none": 0.39915301122324126,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.6931464174454827,
						"likelihood_diff_stderr,none": 0.12493889363563694,
						"pct_stereotype,none": 0.5856697819314641,
						"pct_stereotype_stderr,none": 0.027537513530626192
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.474308300395257,
						"likelihood_diff_stderr,none": 0.1989913947691685,
						"pct_stereotype,none": 0.41106719367588934,
						"pct_stereotype_stderr,none": 0.030994812415369746
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.670138888888889,
						"likelihood_diff_stderr,none": 0.43382585287224057,
						"pct_stereotype,none": 0.6944444444444444,
						"pct_stereotype_stderr,none": 0.05466818705978919
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.788586956521739,
						"likelihood_diff_stderr,none": 0.13686149002042072,
						"pct_stereotype,none": 0.4369565217391304,
						"pct_stereotype_stderr,none": 0.023151745316873387
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.2793478260869566,
						"likelihood_diff_stderr,none": 0.2935606362341876,
						"pct_stereotype,none": 0.6956521739130435,
						"pct_stereotype_stderr,none": 0.043095185024639285
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3434065934065935,
						"likelihood_diff_stderr,none": 0.3188149692376718,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898534
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8941326530612246,
						"likelihood_diff_stderr,none": 0.24443154840647027,
						"pct_stereotype,none": 0.7193877551020408,
						"pct_stereotype_stderr,none": 0.03217492357780148
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15846456692913385,
						"exact_match_stderr,none": 0.008103027740956018
					},
					"glue": {
						"acc,none": 0.7132526559857594,
						"acc_stderr,none": 0.042112227934448423,
						"alias": "glue",
						"f1,none": 0.6326466704791135,
						"f1_stderr,none": 0.00033722797032780433,
						"mcc,none": 0.15650538724366975,
						"mcc_stderr,none": 0.0007411789770813937
					},
					"hellaswag": {
						"acc,none": 0.5374427404899422,
						"acc_norm,none": 0.7278430591515634,
						"acc_norm_stderr,none": 0.0044416066657879125,
						"acc_stderr,none": 0.004975770805464642,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2872653768408894,
						"acc_norm,none": 0.2872653768408894,
						"acc_norm_stderr,none": 0.028665763438712376,
						"acc_stderr,none": 0.028665763438712376,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517911,
						"acc_stderr,none": 0.014297146862517911,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543148,
						"acc_stderr,none": 0.014512395033543148,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656601,
						"acc_stderr,none": 0.013842963108656601,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729486,
						"acc_stderr,none": 0.014013292702729486,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.01773156149490717,
						"acc_stderr,none": 0.01773156149490717,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.343,
						"acc_norm,none": 0.343,
						"acc_norm_stderr,none": 0.015019206922356951,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.316,
						"acc_norm,none": 0.316,
						"acc_norm_stderr,none": 0.014709193056057127,
						"acc_stderr,none": 0.014709193056057127,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.032166339033750324,
						"acc_stderr,none": 0.032166339033750324,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473494,
						"acc_stderr,none": 0.014746404865473494,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.26153846153846155,
						"acc_norm,none": 0.26153846153846155,
						"acc_norm_stderr,none": 0.03869339773766236,
						"acc_stderr,none": 0.03869339773766236,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.04461960433384741,
						"acc_stderr,none": 0.04461960433384741,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134705,
						"acc_stderr,none": 0.014470846741134705,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.323,
						"acc_norm,none": 0.323,
						"acc_norm_stderr,none": 0.014794927843348633,
						"acc_stderr,none": 0.014794927843348633,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965124,
						"acc_stderr,none": 0.013895037677965124,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134713,
						"acc_stderr,none": 0.014470846741134713,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259727,
						"acc_stderr,none": 0.013929286594259727,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220461,
						"acc_stderr,none": 0.014484778521220461,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.307,
						"acc_norm,none": 0.307,
						"acc_norm_stderr,none": 0.014593284892852628,
						"acc_stderr,none": 0.014593284892852628,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485239,
						"acc_stderr,none": 0.014174516461485239,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473477,
						"acc_stderr,none": 0.014746404865473477,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481732,
						"acc_stderr,none": 0.014251810906481732,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750643,
						"acc_stderr,none": 0.013626065817750643,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200487,
						"acc_stderr,none": 0.014282120955200487,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314133,
						"acc_stderr,none": 0.013644675781314133,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.25833333333333336,
						"acc_norm,none": 0.25833333333333336,
						"acc_norm_stderr,none": 0.017884680783142228,
						"acc_stderr,none": 0.017884680783142228,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234188,
						"acc_stderr,none": 0.013807775152234188,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612037,
						"acc_stderr,none": 0.014190150117612037,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.307,
						"acc_norm,none": 0.307,
						"acc_norm_stderr,none": 0.014593284892852625,
						"acc_stderr,none": 0.014593284892852625,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.01451239503354315,
						"acc_stderr,none": 0.01451239503354315,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322674,
						"acc_stderr,none": 0.041633319989322674,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.02557404853322564,
						"acc_stderr,none": 0.02557404853322564,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.0145532056879504,
						"acc_stderr,none": 0.0145532056879504,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434946,
						"acc_stderr,none": 0.014221154708434946,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.031093957143700265,
						"acc_stderr,none": 0.031093957143700265,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796282,
						"acc_stderr,none": 0.013996674851796282,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.02983202555549523,
						"acc_stderr,none": 0.02983202555549523,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.354,
						"acc_norm,none": 0.354,
						"acc_norm_stderr,none": 0.015129868238451772,
						"acc_stderr,none": 0.015129868238451772,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5358474018855515,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.0004959919839679342,
						"acc_stderr,none": 0.04894891516216254,
						"alias": "kobest",
						"f1,none": 0.4640272504035749,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5441595441595442,
						"acc_stderr,none": 0.013296603638702113,
						"alias": " - kobest_boolq",
						"f1,none": 0.43894422410262046,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175111,
						"alias": " - kobest_copa",
						"f1,none": 0.6380585903936138,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.43,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.022270877485360434,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42518266565885615,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.025004412942296043,
						"alias": " - kobest_sentineg",
						"f1,none": 0.48081067851219744,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.48253968253968255,
						"acc_stderr,none": 0.014082902655048371,
						"alias": " - kobest_wic",
						"f1,none": 0.36398320306209664,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7134678827867261,
						"acc_stderr,none": 0.01751591427398706,
						"alias": "lambada",
						"perplexity,none": 3.6803356500824975,
						"perplexity_stderr,none": 0.19091419550622318
					},
					"lambada_cloze": {
						"acc,none": 0.03289346012031826,
						"acc_stderr,none": 0.002541377441051892,
						"alias": "lambada_cloze",
						"perplexity,none": 522.7010411569611,
						"perplexity_stderr,none": 127.61113451422388
					},
					"lambada_multilingual": {
						"acc,none": 0.5425189210168834,
						"acc_stderr,none": 0.0841264148047786,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.830489716125598,
						"perplexity_stderr,none": 8.132453127807624
					},
					"lambada_openai": {
						"acc,none": 0.7461672811954202,
						"acc_stderr,none": 0.006063229044159063,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3279099980503535,
						"perplexity_stderr,none": 0.06476419656839366
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03396079953425189,
						"acc_stderr,none": 0.0025234714805461556,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 270.3721294827472,
						"perplexity_stderr,none": 8.450281659944089
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.43236949349893267,
						"acc_stderr,none": 0.006901960330472658,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.105340588422905,
						"perplexity_stderr,none": 1.8864872244083464
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7463613429070445,
						"acc_stderr,none": 0.006061698956508257,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.327847358956944,
						"perplexity_stderr,none": 0.06479965891305235
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.456821269163594,
						"acc_stderr,none": 0.0069399542718724125,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.689863983359988,
						"perplexity_stderr,none": 1.4082585818947417
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5563749272268581,
						"acc_stderr,none": 0.006921558436638479,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.342328746020012,
						"perplexity_stderr,none": 0.7951833670124188
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5206675722879875,
						"acc_stderr,none": 0.006960024098619231,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.687067903868144,
						"perplexity_stderr,none": 1.1498224059731261
					},
					"lambada_standard": {
						"acc,none": 0.6807684843780322,
						"acc_stderr,none": 0.006494783427738673,
						"alias": " - lambada_standard",
						"perplexity,none": 4.032353579671364,
						"perplexity_stderr,none": 0.08206286354670246
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03182612070638463,
						"acc_stderr,none": 0.0024455728613517404,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 775.029952831175,
						"perplexity_stderr,none": 25.687310087926143
					},
					"logiqa": {
						"acc,none": 0.24423963133640553,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.01780386214853801,
						"acc_stderr,none": 0.016851689430077556,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2608142493638677,
						"acc_norm,none": 0.2907124681933842,
						"acc_norm_stderr,none": 0.01145657755781322,
						"acc_stderr,none": 0.0110778213776563,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2556113902847571,
						"acc_norm,none": 0.2556113902847571,
						"acc_norm_stderr,none": 0.007985287397847436,
						"acc_stderr,none": 0.007985287397847436,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3852997246346113,
						"acc_stderr,none": 0.005008665796520443,
						"alias": "mc_taco",
						"f1,none": 0.5174592617226471,
						"f1_stderr,none": 0.005561136209324997
					},
					"medmcqa": {
						"acc,none": 0.34018646904135785,
						"acc_norm,none": 0.34018646904135785,
						"acc_norm_stderr,none": 0.007326172028144662,
						"acc_stderr,none": 0.007326172028144662,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3605655930871956,
						"acc_norm,none": 0.3605655930871956,
						"acc_norm_stderr,none": 0.013463146942838355,
						"acc_stderr,none": 0.013463146942838355,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.39894601908560035,
						"acc_stderr,none": 0.08243052855347992,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3851851851851852,
						"acc_stderr,none": 0.04203921040156279,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.40131578947368424,
						"acc_stderr,none": 0.039889037033362836,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39245283018867927,
						"acc_stderr,none": 0.03005258057955785,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4236111111111111,
						"acc_stderr,none": 0.0413212501972337,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206824,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3815028901734104,
						"acc_stderr,none": 0.03703851193099521,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.050251890762960605,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.031489558297455304,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.042270544512321984,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4413793103448276,
						"acc_stderr,none": 0.04137931034482758,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30687830687830686,
						"acc_stderr,none": 0.023752928712112122,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.36507936507936506,
						"acc_stderr,none": 0.04306241259127153,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45161290322580644,
						"acc_stderr,none": 0.02831050034856839,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.33497536945812806,
						"acc_stderr,none": 0.033208527423483104,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.03895658065271846,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4696969696969697,
						"acc_stderr,none": 0.03555804051763928,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5284974093264249,
						"acc_stderr,none": 0.03602573571288443,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.36923076923076925,
						"acc_stderr,none": 0.024468615241478916,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.026962424325073835,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3739495798319328,
						"acc_stderr,none": 0.03142946637883708,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.035118075718047245,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5247706422018349,
						"acc_stderr,none": 0.02141099975363592,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.030225226160012404,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4852941176470588,
						"acc_stderr,none": 0.03507793834791324,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5063291139240507,
						"acc_stderr,none": 0.032544620107678585,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.39461883408071746,
						"acc_stderr,none": 0.03280400504755291,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.04385162325601553,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.37130712008501593,
						"acc_stderr,none": 0.08058779150477699,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4132231404958678,
						"acc_stderr,none": 0.04495087843548408,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.49074074074074076,
						"acc_stderr,none": 0.04832853553437055,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3803680981595092,
						"acc_stderr,none": 0.03814269893261835,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04547960999764376,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5145631067961165,
						"acc_stderr,none": 0.049486373240266356,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6410256410256411,
						"acc_stderr,none": 0.03142616993791923,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562427,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5606641123882503,
						"acc_stderr,none": 0.01774787424568361,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.37283236994219654,
						"acc_stderr,none": 0.02603389061357628,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2245810055865922,
						"acc_stderr,none": 0.013956803666544641,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.02835895631342355,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.45123913743160604,
						"acc_stderr,none": 0.08088272662485922,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4758842443729904,
						"acc_stderr,none": 0.028365041542564584,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.44753086419753085,
						"acc_stderr,none": 0.027667138569422704,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.29432624113475175,
						"acc_stderr,none": 0.027187127011503796,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3259452411994785,
						"acc_stderr,none": 0.011971507294982772,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.02989616303312547,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4068627450980392,
						"acc_stderr,none": 0.019873802005061177,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661895,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3306122448979592,
						"acc_stderr,none": 0.030116426296540596,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4406889827754306,
						"acc_stderr,none": 0.06874869757119362,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5671641791044776,
						"acc_stderr,none": 0.03503490923673281,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07394260175893973,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.049999999999999996,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.37349397590361444,
						"acc_stderr,none": 0.03765845117168863,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6198830409356725,
						"acc_stderr,none": 0.037229657413855394,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7725929699439633,
						"acc_stderr,none": 0.004231109767567911,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7703417412530512,
						"acc_stderr,none": 0.004242129295992579,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150203,
						"alias": "mrpc",
						"f1,none": 0.8299531981279251,
						"f1_stderr,none": 0.01605185067039686
					},
					"multimedqa": {
						"acc,none": 0.38381831085876505,
						"acc_norm,none": 0.35013841571764037,
						"acc_norm_stderr,none": 0.000107888339920679,
						"acc_stderr,none": 0.09205764299170353,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5523927392739274,
						"acc_stderr,none": 0.007142266191092191,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.70438299632503,
						"mrr_stderr,none": 0.010334633231787481,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4164785553047404,
						"r@2_stderr,none": 0.01657116712766196
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6555680979724393,
						"mrr_stderr,none": 0.010475406702227427,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403396
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.022017482578127676,
						"acc_stderr,none": 0.02051442622562804,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.011129305041886323,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.010872654105766948,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4185,
						"acc_stderr,none": 0.01103357353138304,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884872,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.01115875256825067,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5245,
						"acc_stderr,none": 0.011169702598013186,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4825714285714286,
						"acc_stderr,none": 0.05011655600534673,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7763873775843307,
						"acc_norm,none": 0.7889009793253536,
						"acc_norm_stderr,none": 0.009521377378734151,
						"acc_stderr,none": 0.0097214895191763,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25352263023057214,
						"acc_norm,none": 0.28229077711357814,
						"acc_norm_stderr,none": 0.0032884841995073646,
						"acc_stderr,none": 0.003178263060501854,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.01978055967565549,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7371012082692274,
						"acc_norm,none": 0.6389293549400216,
						"acc_norm_stderr,none": 0.00980933686646588,
						"acc_stderr,none": 0.1523998964694732,
						"alias": "pythia",
						"bits_per_byte,none": 0.6328346375272971,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5506086709174187,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3279099980503535,
						"perplexity_stderr,none": 0.06476419656839366,
						"word_perplexity,none": 10.44001577641618,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.38652482269503546,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.057945047680791534,
						"acc_stderr,none": 0.044400168586075706,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.04560517440787951,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.475,
						"acc_norm_stderr,none": 0.039602982544438455,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3767605633802817,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064893,
						"acc_stderr,none": 0.028804939288711227,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49441698700347797,
						"acc_stderr,none": 0.00676498878247421,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7195646796932971,
						"acc_stderr,none": 0.002234111143525392,
						"alias": "qqp",
						"f1,none": 0.6309725296185392,
						"f1_stderr,none": 0.0032185784042686977
					},
					"race": {
						"acc,none": 0.35311004784688993,
						"acc_stderr,none": 0.014791764754619243,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2697,
						"em_stderr,none": 0.004438261012613159,
						"f1,none": 0.2793319050014019,
						"f1_stderr,none": 0.00444913950167052
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.959,
						"acc_norm,none": 0.952,
						"acc_norm_stderr,none": 0.006763264133666673,
						"acc_stderr,none": 0.006273624021118768,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7591743119266054,
						"acc_stderr,none": 0.014488154868754016,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.573927821653504,
						"acc_norm,none": 0.7692692192342298,
						"acc_norm_stderr,none": 0.0029786718948360907,
						"acc_stderr,none": 0.0034962376835383493,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6736215100994976,
						"acc_stderr,none": 0.08447157646683724,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5956530448717948,
						"acc_stderr,none": 0.0049118289678237535,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8530455052194182,
						"acc_stderr,none": 0.0035645686314214756,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5763725490196079,
						"acc_stderr,none": 0.004892883728751374,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.32746936852663516,
						"acc_stderr,none": 0.0014562895588545325,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323013,
						"bleu_diff,none": -8.214221247964888,
						"bleu_diff_stderr,none": 0.8658822229968159,
						"bleu_max,none": 27.310848423515786,
						"bleu_max_stderr,none": 0.8092416776606851,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -10.614300262439293,
						"rouge1_diff_stderr,none": 0.9151281001062015,
						"rouge1_max,none": 52.84718552395938,
						"rouge1_max_stderr,none": 0.8575997575020479,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.53777492354921,
						"rouge2_diff_stderr,none": 1.1187754496884528,
						"rouge2_max,none": 36.68614165650879,
						"rouge2_max_stderr,none": 1.0198599269927322,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.015723139524608756,
						"rougeL_diff,none": -10.894530995061702,
						"rougeL_diff_stderr,none": 0.9347372413501509,
						"rougeL_max,none": 49.825789813750234,
						"rougeL_max_stderr,none": 0.8774269078697249
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323013,
						"bleu_diff,none": -8.214221247964888,
						"bleu_diff_stderr,none": 0.8658822229968159,
						"bleu_max,none": 27.310848423515786,
						"bleu_max_stderr,none": 0.8092416776606851,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -10.614300262439293,
						"rouge1_diff_stderr,none": 0.9151281001062015,
						"rouge1_max,none": 52.84718552395938,
						"rouge1_max_stderr,none": 0.8575997575020479,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.53777492354921,
						"rouge2_diff_stderr,none": 1.1187754496884528,
						"rouge2_max,none": 36.68614165650879,
						"rouge2_max_stderr,none": 1.0198599269927322,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.015723139524608756,
						"rougeL_diff,none": -10.894530995061702,
						"rougeL_diff_stderr,none": 0.9347372413501509,
						"rougeL_max,none": 49.825789813750234,
						"rougeL_max_stderr,none": 0.8774269078697249
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25703794369645044,
						"acc_stderr,none": 0.015298077509485083,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39790079335681994,
						"acc_stderr,none": 0.014035677200722569,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.15846456692913385,
						"exact_match_stderr,none": 0.008103027740956018
					},
					"wic": {
						"acc,none": 0.5454545454545454,
						"acc_stderr,none": 0.019728688969162265,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6328346375272971,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5506086709174187,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.44001577641618,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7016574585635359,
						"acc_stderr,none": 0.012858885010030432,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.47115384615384615,
						"acc_stderr,none": 0.04918440626354964,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070851,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6214545454545454,
						"acc_stderr,none": 0.07023171601514061,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209188,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.019684688820194723,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561316,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.021461434862859126,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503226,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44061579651941096,
						"acc_stderr,none": 0.049870647580300576,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.00946022348499647,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47630522088353416,
						"acc_stderr,none": 0.010010812905412066,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829976,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39759036144578314,
						"acc_stderr,none": 0.00980960299607581,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5248995983935743,
						"acc_stderr,none": 0.01000963798302251,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5120481927710844,
						"acc_stderr,none": 0.010019162857624489,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5040160642570282,
						"acc_stderr,none": 0.010021749574555901,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41606425702811245,
						"acc_stderr,none": 0.00987984851147976,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.48714859437751,
						"acc_stderr,none": 0.010018761856718258,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40281124497991966,
						"acc_stderr,none": 0.009830919849814058,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42208835341365464,
						"acc_stderr,none": 0.009899652714895422,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4614457831325301,
						"acc_stderr,none": 0.009992234275993063,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314554,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840168,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3497991967871486,
						"acc_stderr,none": 0.009559181474778286,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.628722700198544,
						"acc_stderr,none": 0.060089979068040984,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750353,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7769688947716744,
						"acc_stderr,none": 0.010712628906979185,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.01162785634694061,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5757776307081403,
						"acc_stderr,none": 0.012718494399531065,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040305,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6604897418927862,
						"acc_stderr,none": 0.012186276146659446,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.012799246690109756,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6770350761085374,
						"acc_stderr,none": 0.012033578346967673,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084812,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.0127078621318019,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6446062210456651,
						"acc_stderr,none": 0.012317247930418374,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8172623061362104,
						"acc_stderr,none": 0.039142099484241494,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8834408602150537,
						"acc_stderr,none": 0.006656467960805351,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.735140771637122,
						"acc_stderr,none": 0.0142564060066853,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.026198057744026396,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.018417468024139707,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-A": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6383878241262683,
						"acc_norm,none": 0.6214768883878241,
						"acc_norm_stderr,none": 0.07923091639527215,
						"acc_stderr,none": 0.10580906465264833,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.505,
						"acc_stderr,none": 0.05555881476263796,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1719,
						"acc_stderr,none": 0.22344154701416394,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8249253731343283,
						"acc_stderr,none": 0.16011423295280272,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2607726597325409,
						"acc_norm,none": 0.2607726597325409,
						"acc_norm_stderr,none": 0.11719712865535459,
						"acc_stderr,none": 0.11719712865535459,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2900189949922292,
						"acc_norm,none": 0.2900189949922292,
						"acc_norm_stderr,none": 0.04966015730854232,
						"acc_stderr,none": 0.04966015730854232,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.708836463923673,
						"likelihood_diff_stderr,none": 0.5518418885554971,
						"pct_stereotype,none": 0.6113595706618963,
						"pct_stereotype_stderr,none": 0.07105305347739814
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04576771653543307,
						"exact_match_stderr,none": 0.004637156076081139
					},
					"glue": {
						"acc,none": 0.7565954025726537,
						"acc_stderr,none": 0.004859001308903468,
						"alias": "glue",
						"f1,none": 0.7314132522295939,
						"f1_stderr,none": 9.292154445208698e-05,
						"mcc,none": 0.13589511140750968,
						"mcc_stderr,none": 0.03395515679038307
					},
					"kmmlu": {
						"acc,none": 0.27118105688709204,
						"acc_norm,none": 0.27118105688709204,
						"acc_norm_stderr,none": 0.026383979802907905,
						"acc_stderr,none": 0.026383979802907905,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5880289410217058,
						"acc_norm,none": 0.548,
						"acc_norm_stderr,none": 0.0004963847695390732,
						"acc_stderr,none": 0.07010669732382775,
						"alias": "kobest",
						"f1,none": 0.5686870768384056,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7221036289540074,
						"acc_stderr,none": 0.014418280349648739,
						"alias": "lambada",
						"perplexity,none": 3.4608068685210376,
						"perplexity_stderr,none": 0.16211868464661763
					},
					"lambada_cloze": {
						"acc,none": 0.06879487677081311,
						"acc_stderr,none": 0.004729803388973157,
						"alias": "lambada_cloze",
						"perplexity,none": 248.1498587243754,
						"perplexity_stderr,none": 11.203330027468471
					},
					"lambada_multilingual": {
						"acc,none": 0.5517950708325248,
						"acc_stderr,none": 0.08185620858418498,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.35124603027142,
						"perplexity_stderr,none": 7.565818767033127
					},
					"mmlu": {
						"acc,none": 0.4344822674832645,
						"acc_stderr,none": 0.10034267100203663,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.4099893730074389,
						"acc_stderr,none": 0.11114918281073527,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.48632121017058255,
						"acc_stderr,none": 0.08750625836621839,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4848878778030549,
						"acc_stderr,none": 0.08936666591582938,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3707580082461148,
						"acc_stderr,none": 0.07700670240886777,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4106458481192335,
						"acc_norm,none": 0.3822794275176328,
						"acc_norm_stderr,none": 0.00010930790713489269,
						"acc_stderr,none": 0.06374871155445498,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4735,
						"acc_stderr,none": 0.058882214166930576,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7501462337982135,
						"acc_norm,none": 0.6258412991530274,
						"acc_norm_stderr,none": 0.008326494163455439,
						"acc_stderr,none": 0.15286284124750957,
						"alias": "pythia",
						"bits_per_byte,none": 0.6390060682476366,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572559322219164,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.168243885148654,
						"perplexity_stderr,none": 0.06146529860251976,
						"word_perplexity,none": 10.681581847528923,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.42730496453900707,
						"acc_norm_stderr,none": 0.05482289826213123,
						"acc_stderr,none": 0.04158968534104432,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6642041862167648,
						"acc_stderr,none": 0.07279024716867014,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34011384289858004,
						"acc_stderr,none": 0.0014771726697635596,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.980583337371578,
						"bleu_diff_stderr,none": 0.852897458608575,
						"bleu_max,none": 26.551295548659205,
						"bleu_max_stderr,none": 0.8011498231316072,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -9.821103815997025,
						"rouge1_diff_stderr,none": 0.9028693256633942,
						"rouge1_max,none": 52.182254980688725,
						"rouge1_max_stderr,none": 0.8530280101906371,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.01525011707915649,
						"rouge2_diff,none": -11.491573113934427,
						"rouge2_diff_stderr,none": 1.1000281924780684,
						"rouge2_max,none": 36.39506840157793,
						"rouge2_max_stderr,none": 0.9973160949093164,
						"rougeL_acc,none": 0.29008567931456547,
						"rougeL_acc_stderr,none": 0.01588623687420952,
						"rougeL_diff,none": -10.04885259335416,
						"rougeL_diff_stderr,none": 0.9171809739466521,
						"rougeL_max,none": 49.24188675732332,
						"rougeL_max_stderr,none": 0.8724251057774257
					},
					"xcopa": {
						"acc,none": 0.6256363636363637,
						"acc_stderr,none": 0.07173344559836263,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4378580990629183,
						"acc_stderr,none": 0.046141989397628855,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6344383611094399,
						"acc_stderr,none": 0.060996520625444285,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8136659923578332,
						"acc_stderr,none": 0.038359101094717726,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6383878241262683,
						"acc_norm,none": 0.6214768883878241,
						"acc_norm_stderr,none": 0.07923091639527215,
						"acc_stderr,none": 0.10580906465264833,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.505,
						"acc_stderr,none": 0.05555881476263796,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797577,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.0157161699532041,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4608333333333333,
						"acc_stderr,none": 0.014395404356043523,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41467576791808874,
						"acc_norm,none": 0.454778156996587,
						"acc_norm_stderr,none": 0.014551507060836352,
						"acc_stderr,none": 0.014397070564409172,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7487373737373737,
						"acc_norm,none": 0.7037037037037037,
						"acc_norm_stderr,none": 0.009369711585684304,
						"acc_stderr,none": 0.008900141191221643,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1719,
						"acc_stderr,none": 0.22344154701416394,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.1255,
						"acc_stderr,none": 0.007409610392124575,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.2725,
						"acc_stderr,none": 0.00995848686951823,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0665,
						"acc_stderr,none": 0.005572647683202411,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9175,
						"acc_stderr,none": 0.006153519960473979,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0065,
						"acc_stderr,none": 0.0017973564602277773,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.008401505379771048,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154647114,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0875,
						"acc_stderr,none": 0.006319956164639151,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339537,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.069,
						"acc_stderr,none": 0.005668824197652675,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004338394793926247,
						"acc_stderr,none": 0.0013692387389319528,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8249253731343283,
						"acc_stderr,none": 0.16011423295280272,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.00408995448968904,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578206,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024957,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697598,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319312,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.00768700787628641,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.00863212103213996,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142643,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140931,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513064974,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118768,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.0075137511574749185,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792954,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178327,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665544,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574874,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.01431208705380996,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697045,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408032,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.015806639423035167,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942303,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731973,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.015466551464829345,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.01399667485179627,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524306,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753655,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.00882342636694232,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244052,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103773,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.0076870078762864185,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.314,
						"acc_stderr,none": 0.01468399195108797,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.015308767369006378,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.01556091713692166,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.627,
						"acc_stderr,none": 0.01530049362292281,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.015786868759359016,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942317,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844005,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248114,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.00431945108291064,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340988,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091512,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595311,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406136,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525023,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.00199699473909873,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968531,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.015752210388771847,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042967,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698467,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.015222868840522019,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098703,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621223,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.333,
						"acc_stderr,none": 0.014910846164229854,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662146,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704164,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343977,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081361,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.396,
						"acc_stderr,none": 0.015473313265859408,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779843,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.691131498470948,
						"acc_stderr,none": 0.008080899275231321,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.6869845948696355,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2607726597325409,
						"acc_norm,none": 0.2607726597325409,
						"acc_norm_stderr,none": 0.11719712865535459,
						"acc_stderr,none": 0.11719712865535459,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434487,
						"acc_stderr,none": 0.07233518641434487,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2900189949922292,
						"acc_norm,none": 0.2900189949922292,
						"acc_norm_stderr,none": 0.04966015730854232,
						"acc_stderr,none": 0.04966015730854232,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.35625,
						"acc_norm_stderr,none": 0.03797847267587851,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.03223012819451556,
						"acc_stderr,none": 0.03223012819451556,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2867647058823529,
						"acc_norm,none": 0.2867647058823529,
						"acc_norm_stderr,none": 0.038923544178637824,
						"acc_stderr,none": 0.038923544178637824,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.29102167182662536,
						"acc_norm,none": 0.29102167182662536,
						"acc_norm_stderr,none": 0.02531344242805741,
						"acc_stderr,none": 0.02531344242805741,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923382,
						"acc_stderr,none": 0.030964517926923382,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3407821229050279,
						"acc_norm,none": 0.3407821229050279,
						"acc_norm_stderr,none": 0.03552572003977931,
						"acc_stderr,none": 0.03552572003977931,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036402,
						"acc_stderr,none": 0.027985699387036402,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.04384295586918883,
						"acc_stderr,none": 0.04384295586918883,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564398,
						"acc_stderr,none": 0.026350722655564398,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.032834720561085676,
						"acc_stderr,none": 0.032834720561085676,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.28654970760233917,
						"acc_norm,none": 0.28654970760233917,
						"acc_norm_stderr,none": 0.03467826685703826,
						"acc_stderr,none": 0.03467826685703826,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935573,
						"acc_stderr,none": 0.03714908409935573,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.02874673063268137,
						"acc_stderr,none": 0.02874673063268137,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.27391304347826084,
						"acc_norm,none": 0.27391304347826084,
						"acc_norm_stderr,none": 0.029470189815005897,
						"acc_stderr,none": 0.029470189815005897,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.03749763364527049,
						"acc_stderr,none": 0.03749763364527049,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233627,
						"acc_stderr,none": 0.04222776832233627,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0404061017820884,
						"acc_stderr,none": 0.0404061017820884,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3081395348837209,
						"acc_norm,none": 0.3081395348837209,
						"acc_norm_stderr,none": 0.03530895898152283,
						"acc_stderr,none": 0.03530895898152283,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.03331120297324246,
						"acc_stderr,none": 0.03331120297324246,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.0404901546062249,
						"acc_stderr,none": 0.0404901546062249,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069252,
						"acc_stderr,none": 0.04075944659069252,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.32857142857142857,
						"acc_norm,none": 0.32857142857142857,
						"acc_norm_stderr,none": 0.0324893979687684,
						"acc_stderr,none": 0.0324893979687684,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.034251778896020865,
						"acc_stderr,none": 0.034251778896020865,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.03394921616447879,
						"acc_stderr,none": 0.03394921616447879,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752742,
						"acc_stderr,none": 0.03424737867752742,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0312732353098133,
						"acc_stderr,none": 0.0312732353098133,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.0351734690130024,
						"acc_stderr,none": 0.0351734690130024,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.30973451327433627,
						"acc_norm,none": 0.30973451327433627,
						"acc_norm_stderr,none": 0.030825605846874653,
						"acc_stderr,none": 0.030825605846874653,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.035231683977370906,
						"acc_stderr,none": 0.035231683977370906,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.13589511140750968,
						"mcc_stderr,none": 0.03395515679038307
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.708836463923673,
						"likelihood_diff_stderr,none": 0.5518418885554971,
						"pct_stereotype,none": 0.6113595706618963,
						"pct_stereotype_stderr,none": 0.07105305347739814
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.9926207513416814,
						"likelihood_diff_stderr,none": 0.09383177825652289,
						"pct_stereotype,none": 0.6422182468694096,
						"pct_stereotype_stderr,none": 0.011708827480368516
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.133241758241758,
						"likelihood_diff_stderr,none": 0.40120964358445715,
						"pct_stereotype,none": 0.6703296703296703,
						"pct_stereotype_stderr,none": 0.04955219508596586
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.613636363636363,
						"likelihood_diff_stderr,none": 1.6234655819781854,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.111538461538461,
						"likelihood_diff_stderr,none": 0.6001309645037598,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.8359375,
						"likelihood_diff_stderr,none": 0.1687644896469963,
						"pct_stereotype,none": 0.60625,
						"pct_stereotype_stderr,none": 0.027355258158219247
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.933449074074074,
						"likelihood_diff_stderr,none": 0.2619465867863574,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.600694444444445,
						"likelihood_diff_stderr,none": 0.39391087772829975,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.8289862204724407,
						"likelihood_diff_stderr,none": 0.1699590605450242,
						"pct_stereotype,none": 0.547244094488189,
						"pct_stereotype_stderr,none": 0.022106430541228052
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.195945945945946,
						"likelihood_diff_stderr,none": 0.39080016464634015,
						"pct_stereotype,none": 0.7657657657657657,
						"pct_stereotype_stderr,none": 0.04038097636567092
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.419354838709677,
						"likelihood_diff_stderr,none": 0.5081573831508919,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.034950731541029775
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.464473684210526,
						"likelihood_diff_stderr,none": 0.24607990041251815,
						"pct_stereotype,none": 0.6894736842105263,
						"pct_stereotype_stderr,none": 0.03365713545671698
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.422107930828861,
						"likelihood_diff_stderr,none": 0.07903447216158051,
						"pct_stereotype,none": 0.5819916517590936,
						"pct_stereotype_stderr,none": 0.01204796918492052
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.4305555555555554,
						"likelihood_diff_stderr,none": 0.30435939401777823,
						"pct_stereotype,none": 0.6444444444444445,
						"pct_stereotype_stderr,none": 0.05074011803597718
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.2115384615384617,
						"likelihood_diff_stderr,none": 1.0442325400183314,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.246212121212121,
						"likelihood_diff_stderr,none": 0.49406419583976446,
						"pct_stereotype,none": 0.7575757575757576,
						"pct_stereotype_stderr,none": 0.05315503147315326
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.9139408099688473,
						"likelihood_diff_stderr,none": 0.13959352986919335,
						"pct_stereotype,none": 0.616822429906542,
						"pct_stereotype_stderr,none": 0.027177226212327755
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.617588932806324,
						"likelihood_diff_stderr,none": 0.2067131325626559,
						"pct_stereotype,none": 0.4189723320158103,
						"pct_stereotype_stderr,none": 0.03108070121761647
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.5034722222222223,
						"likelihood_diff_stderr,none": 0.38505345642959915,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.233695652173913,
						"likelihood_diff_stderr,none": 0.16239375928819205,
						"pct_stereotype,none": 0.48478260869565215,
						"pct_stereotype_stderr,none": 0.023327190181139237
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3260869565217392,
						"likelihood_diff_stderr,none": 0.28006097994717666,
						"pct_stereotype,none": 0.7043478260869566,
						"pct_stereotype_stderr,none": 0.04273972288221525
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.208791208791209,
						"likelihood_diff_stderr,none": 0.3065539622155568,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898533
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.005739795918367,
						"likelihood_diff_stderr,none": 0.24554889802658858,
						"pct_stereotype,none": 0.6887755102040817,
						"pct_stereotype_stderr,none": 0.03315571704943973
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04576771653543307,
						"exact_match_stderr,none": 0.004637156076081139
					},
					"glue": {
						"acc,none": 0.7565954025726537,
						"acc_stderr,none": 0.004859001308903468,
						"alias": "glue",
						"f1,none": 0.7314132522295939,
						"f1_stderr,none": 9.292154445208698e-05,
						"mcc,none": 0.13589511140750968,
						"mcc_stderr,none": 0.03395515679038307
					},
					"hellaswag": {
						"acc,none": 0.5541724756024696,
						"acc_norm,none": 0.7410874327823143,
						"acc_norm_stderr,none": 0.004371422731216415,
						"acc_stderr,none": 0.004960408362133249,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.27118105688709204,
						"acc_norm,none": 0.27118105688709204,
						"acc_norm_stderr,none": 0.026383979802907905,
						"acc_stderr,none": 0.026383979802907905,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145144,
						"acc_stderr,none": 0.013979965645145144,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485256,
						"acc_stderr,none": 0.014174516461485256,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514516,
						"acc_stderr,none": 0.01397996564514516,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.017613084291727022,
						"acc_stderr,none": 0.017613084291727022,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965133,
						"acc_stderr,none": 0.013895037677965133,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.335,
						"acc_norm,none": 0.335,
						"acc_norm_stderr,none": 0.014933117490932577,
						"acc_stderr,none": 0.014933117490932577,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633915,
						"acc_stderr,none": 0.014046255632633915,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.030897382432918605,
						"acc_stderr,none": 0.030897382432918605,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.18461538461538463,
						"acc_norm,none": 0.18461538461538463,
						"acc_norm_stderr,none": 0.034160195383985695,
						"acc_stderr,none": 0.034160195383985695,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895028,
						"acc_stderr,none": 0.013825416526895028,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543147,
						"acc_stderr,none": 0.014512395033543147,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888935,
						"acc_stderr,none": 0.013718133516888935,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809949,
						"acc_stderr,none": 0.013963164754809949,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796275,
						"acc_stderr,none": 0.013996674851796275,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729498,
						"acc_stderr,none": 0.014013292702729498,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.01423652621529134,
						"acc_stderr,none": 0.01423652621529134,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768078,
						"acc_stderr,none": 0.04408440022768078,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717604,
						"acc_stderr,none": 0.014095022868717604,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206494,
						"acc_stderr,none": 0.014619600977206494,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.01414298497574067,
						"acc_stderr,none": 0.01414298497574067,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555946,
						"acc_stderr,none": 0.013550631705555946,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091513,
						"acc_stderr,none": 0.014205696104091513,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2866666666666667,
						"acc_norm,none": 0.2866666666666667,
						"acc_norm_stderr,none": 0.01847657402752119,
						"acc_stderr,none": 0.01847657402752119,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689503,
						"acc_stderr,none": 0.01382541652689503,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021349,
						"acc_stderr,none": 0.013912208651021349,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441276,
						"acc_stderr,none": 0.014399942998441276,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.03942772444036623,
						"acc_stderr,none": 0.03942772444036623,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.02481518457232592,
						"acc_stderr,none": 0.02481518457232592,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234188,
						"acc_stderr,none": 0.013807775152234188,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259588,
						"acc_stderr,none": 0.014111099288259588,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796264,
						"acc_stderr,none": 0.013996674851796264,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877654,
						"acc_stderr,none": 0.013663187134877654,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.014965960710224482,
						"acc_stderr,none": 0.014965960710224482,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5880289410217058,
						"acc_norm,none": 0.548,
						"acc_norm_stderr,none": 0.0004963847695390732,
						"acc_stderr,none": 0.07010669732382775,
						"alias": "kobest",
						"f1,none": 0.5686870768384056,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6638176638176638,
						"acc_stderr,none": 0.012611972415037342,
						"alias": " - kobest_boolq",
						"f1,none": 0.6524424398174857,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154619,
						"alias": " - kobest_copa",
						"f1,none": 0.6301830744113748,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.432,
						"acc_norm,none": 0.548,
						"acc_norm_stderr,none": 0.022279694107843428,
						"acc_stderr,none": 0.02217510926561316,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42859657650058824,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.7153652392947103,
						"acc_stderr,none": 0.02267567856186984,
						"alias": " - kobest_sentineg",
						"f1,none": 0.7137305451505658,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.49126984126984125,
						"acc_stderr,none": 0.014089349069808639,
						"alias": " - kobest_wic",
						"f1,none": 0.43644458957918597,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7221036289540074,
						"acc_stderr,none": 0.014418280349648739,
						"alias": "lambada",
						"perplexity,none": 3.4608068685210376,
						"perplexity_stderr,none": 0.16211868464661763
					},
					"lambada_cloze": {
						"acc,none": 0.06879487677081311,
						"acc_stderr,none": 0.004729803388973157,
						"alias": "lambada_cloze",
						"perplexity,none": 248.1498587243754,
						"perplexity_stderr,none": 11.203330027468471
					},
					"lambada_multilingual": {
						"acc,none": 0.5517950708325248,
						"acc_stderr,none": 0.08185620858418498,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.35124603027142,
						"perplexity_stderr,none": 7.565818767033127
					},
					"lambada_openai": {
						"acc,none": 0.746749466330293,
						"acc_stderr,none": 0.006058634002437434,
						"alias": " - lambada_openai",
						"perplexity,none": 3.168243885148654,
						"perplexity_stderr,none": 0.06146529860251976
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.06248787114302348,
						"acc_stderr,none": 0.0033720840032029978,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 233.21991630616725,
						"perplexity_stderr,none": 8.10310928631037
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.43993790025228025,
						"acc_stderr,none": 0.006915536116983778,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 32.139720973669235,
						"perplexity_stderr,none": 1.7811345083380186
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7483019600232874,
						"acc_stderr,none": 0.006046310291269681,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1668112300896105,
						"perplexity_stderr,none": 0.06145395991522651
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.47137589753541626,
						"acc_stderr,none": 0.006954553291373015,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.845659144590474,
						"perplexity_stderr,none": 1.2650386178988973
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5664661362313216,
						"acc_stderr,none": 0.006904155467557466,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 15.26641589752251,
						"perplexity_stderr,none": 0.739039943957898
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5328934601203182,
						"acc_stderr,none": 0.006950887218847425,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.337622905485258,
						"perplexity_stderr,none": 1.0731193264083911
					},
					"lambada_standard": {
						"acc,none": 0.6960993595963516,
						"acc_stderr,none": 0.006407867125328469,
						"alias": " - lambada_standard",
						"perplexity,none": 3.7548273006816935,
						"perplexity_stderr,none": 0.07457531643377946
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.07510188239860276,
						"acc_stderr,none": 0.003671845776844112,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 263.0798011425835,
						"perplexity_stderr,none": 8.597672162831305
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.33969465648854963,
						"exact_match_stderr,get-answer": 0.011948920483739104
					},
					"logiqa": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.01788624973410439,
						"acc_stderr,none": 0.016705867034419633,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2639949109414758,
						"acc_norm,none": 0.2875318066157761,
						"acc_norm_stderr,none": 0.011419250355256812,
						"acc_stderr,none": 0.011121160118426511,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25795644891122277,
						"acc_norm,none": 0.2619765494137353,
						"acc_norm_stderr,none": 0.008049462477079312,
						"acc_stderr,none": 0.008009187907885278,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3433594577420038,
						"acc_stderr,none": 0.004886853897431385,
						"alias": "mc_taco",
						"f1,none": 0.5066836409929981,
						"f1_stderr,none": 0.005451844247468536
					},
					"medmcqa": {
						"acc,none": 0.3767630886923261,
						"acc_norm,none": 0.3767630886923261,
						"acc_norm_stderr,none": 0.007493224481197773,
						"acc_stderr,none": 0.007493224481197773,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.39120188531029065,
						"acc_norm,none": 0.39120188531029065,
						"acc_norm_stderr,none": 0.013683385527596343,
						"acc_stderr,none": 0.013683385527596343,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4344822674832645,
						"acc_stderr,none": 0.10034267100203663,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252606,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.043163785995113245,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779205,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.47547169811320755,
						"acc_stderr,none": 0.030735822206205615,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4652777777777778,
						"acc_stderr,none": 0.04171115858181618,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.037143259063020635,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.04488482852329017,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3872340425531915,
						"acc_stderr,none": 0.03184389265339525,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.496551724137931,
						"acc_stderr,none": 0.041665675771015785,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.023973861998992072,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4645161290322581,
						"acc_stderr,none": 0.028372287797962956,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.37438423645320196,
						"acc_stderr,none": 0.03405155380561952,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.6060606060606061,
						"acc_stderr,none": 0.0381549430868893,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4696969696969697,
						"acc_stderr,none": 0.03555804051763929,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6062176165803109,
						"acc_stderr,none": 0.035260770955482405,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3974358974358974,
						"acc_stderr,none": 0.024811920017903836,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.02784081149587193,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.40336134453781514,
						"acc_stderr,none": 0.03186608121408831,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2847682119205298,
						"acc_stderr,none": 0.03684881521389023,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5376146788990825,
						"acc_stderr,none": 0.02137657527439758,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25462962962962965,
						"acc_stderr,none": 0.02971127586000535,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5441176470588235,
						"acc_stderr,none": 0.03495624522015477,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.030685820596610812,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4663677130044843,
						"acc_stderr,none": 0.033481800170603065,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5343511450381679,
						"acc_stderr,none": 0.04374928560599738,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.4099893730074389,
						"acc_stderr,none": 0.11114918281073527,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.5041322314049587,
						"acc_stderr,none": 0.04564198767432754,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5648148148148148,
						"acc_stderr,none": 0.04792898170907061,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4233128834355828,
						"acc_stderr,none": 0.03881891213334384,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5339805825242718,
						"acc_stderr,none": 0.0493929144727348,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6581196581196581,
						"acc_stderr,none": 0.031075028526507748,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6066411238825032,
						"acc_stderr,none": 0.01746855672450315,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.44508670520231214,
						"acc_stderr,none": 0.026756255129663776,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.20558659217877095,
						"acc_stderr,none": 0.013516116210724202,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.45751633986928103,
						"acc_stderr,none": 0.028526383452142635,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.48632121017058255,
						"acc_stderr,none": 0.08750625836621839,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5337620578778135,
						"acc_stderr,none": 0.028333277109562793,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5123456790123457,
						"acc_stderr,none": 0.027812262269327235,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.32269503546099293,
						"acc_stderr,none": 0.027889139300534785,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.35723598435462844,
						"acc_stderr,none": 0.0122386157503165,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4227941176470588,
						"acc_stderr,none": 0.030008562845003486,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.43790849673202614,
						"acc_stderr,none": 0.02007125788688653,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4909090909090909,
						"acc_stderr,none": 0.04788339768702861,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4448979591836735,
						"acc_stderr,none": 0.031814251181977865,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4848878778030549,
						"acc_stderr,none": 0.08936666591582938,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6965174129353234,
						"acc_stderr,none": 0.03251006816458618,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3707580082461148,
						"acc_stderr,none": 0.07700670240886777,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695237,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6491228070175439,
						"acc_stderr,none": 0.036602988340491624,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7955170657157412,
						"acc_stderr,none": 0.004071273307089601,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7963791700569569,
						"acc_stderr,none": 0.004061366663037754,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.02186830575426217,
						"alias": "mrpc",
						"f1,none": 0.8322981366459627,
						"f1_stderr,none": 0.015914241561164326
					},
					"multimedqa": {
						"acc,none": 0.4106458481192335,
						"acc_norm,none": 0.3822794275176328,
						"acc_norm_stderr,none": 0.00010930790713489269,
						"acc_stderr,none": 0.06374871155445498,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5618811881188119,
						"acc_stderr,none": 0.007126588567359374,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7144469541095988,
						"mrr_stderr,none": 0.010296523088232335,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4018058690744921,
						"r@2_stderr,none": 0.016480014009503316
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6603649375670112,
						"mrr_stderr,none": 0.010407401316235017,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4762979683972912,
						"r@2_stderr,none": 0.016788421275515525
					},
					"openbookqa": {
						"acc,none": 0.31,
						"acc_norm,none": 0.426,
						"acc_norm_stderr,none": 0.022136577335085634,
						"acc_stderr,none": 0.020704041021724805,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.010961732517713431,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.010828024891988879,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404668,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003715,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.011160209457602892,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.011160921022883272,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5255,
						"acc_stderr,none": 0.011168582883330069,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4735,
						"acc_stderr,none": 0.058882214166930576,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7774755168661589,
						"acc_norm,none": 0.7823721436343852,
						"acc_norm_stderr,none": 0.009627407474840883,
						"acc_stderr,none": 0.009704600975718238,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2724701110162254,
						"acc_norm,none": 0.2957408198121264,
						"acc_norm_stderr,none": 0.003334226093221727,
						"acc_stderr,none": 0.0032528048262600094,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.021487751089720526,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7501462337982135,
						"acc_norm,none": 0.6258412991530274,
						"acc_norm_stderr,none": 0.008326494163455439,
						"acc_stderr,none": 0.15286284124750957,
						"alias": "pythia",
						"bits_per_byte,none": 0.6390060682476366,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572559322219164,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.168243885148654,
						"perplexity_stderr,none": 0.06146529860251976,
						"word_perplexity,none": 10.681581847528923,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.42730496453900707,
						"acc_norm_stderr,none": 0.05482289826213123,
						"acc_stderr,none": 0.04158968534104432,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.33125,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03732598513993524,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38380281690140844,
						"acc_norm,none": 0.3873239436619718,
						"acc_norm_stderr,none": 0.028957389575950957,
						"acc_stderr,none": 0.028908177688046176,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7711105614642592,
						"acc_stderr,none": 0.0020894149749062898,
						"alias": "qqp",
						"f1,none": 0.7305497321220592,
						"f1_stderr,none": 0.0026921082129103893
					},
					"race": {
						"acc,none": 0.3559808612440191,
						"acc_stderr,none": 0.014818780400538124,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2594,
						"em_stderr,none": 0.004383273355442427,
						"f1,none": 0.2689585716724396,
						"f1_stderr,none": 0.00439626356128236
					},
					"rte": {
						"acc,none": 0.703971119133574,
						"acc_stderr,none": 0.02747830386297935,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.934,
						"acc_norm,none": 0.929,
						"acc_norm_stderr,none": 0.00812557844248791,
						"acc_stderr,none": 0.007855297938697598,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6967509025270758,
						"acc_stderr,none": 0.02766839629359371,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8853211009174312,
						"acc_stderr,none": 0.010796502452107722,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5860241927421773,
						"acc_norm,none": 0.775517344796561,
						"acc_norm_stderr,none": 0.002949971873137297,
						"acc_stderr,none": 0.0034823785223763396,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6642041862167648,
						"acc_stderr,none": 0.07279024716867014,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5691105769230769,
						"acc_stderr,none": 0.004956221528530022,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.851525286307895,
						"acc_stderr,none": 0.0035797645848129407,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.576078431372549,
						"acc_stderr,none": 0.004893332969233632,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34011384289858004,
						"acc_stderr,none": 0.0014771726697635596,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.980583337371578,
						"bleu_diff_stderr,none": 0.852897458608575,
						"bleu_max,none": 26.551295548659205,
						"bleu_max_stderr,none": 0.8011498231316072,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -9.821103815997025,
						"rouge1_diff_stderr,none": 0.9028693256633942,
						"rouge1_max,none": 52.182254980688725,
						"rouge1_max_stderr,none": 0.8530280101906371,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.01525011707915649,
						"rouge2_diff,none": -11.491573113934427,
						"rouge2_diff_stderr,none": 1.1000281924780684,
						"rouge2_max,none": 36.39506840157793,
						"rouge2_max_stderr,none": 0.9973160949093164,
						"rougeL_acc,none": 0.29008567931456547,
						"rougeL_acc_stderr,none": 0.01588623687420952,
						"rougeL_diff,none": -10.04885259335416,
						"rougeL_diff_stderr,none": 0.9171809739466521,
						"rougeL_max,none": 49.24188675732332,
						"rougeL_max_stderr,none": 0.8724251057774257
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.980583337371578,
						"bleu_diff_stderr,none": 0.852897458608575,
						"bleu_max,none": 26.551295548659205,
						"bleu_max_stderr,none": 0.8011498231316072,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -9.821103815997025,
						"rouge1_diff_stderr,none": 0.9028693256633942,
						"rouge1_max,none": 52.182254980688725,
						"rouge1_max_stderr,none": 0.8530280101906371,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.01525011707915649,
						"rouge2_diff,none": -11.491573113934427,
						"rouge2_diff_stderr,none": 1.1000281924780684,
						"rouge2_max,none": 36.39506840157793,
						"rouge2_max_stderr,none": 0.9973160949093164,
						"rougeL_acc,none": 0.29008567931456547,
						"rougeL_acc_stderr,none": 0.01588623687420952,
						"rougeL_diff,none": -10.04885259335416,
						"rougeL_diff_stderr,none": 0.9171809739466521,
						"rougeL_max,none": 49.24188675732332,
						"rougeL_max_stderr,none": 0.8724251057774257
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2692778457772338,
						"acc_stderr,none": 0.015528566637087281,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4109498400199263,
						"acc_stderr,none": 0.01425045190861715,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.047244094488188976,
						"exact_match_stderr,none": 0.004707709194633815
					},
					"wic": {
						"acc,none": 0.567398119122257,
						"acc_stderr,none": 0.019629915558485096,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6390060682476366,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572559322219164,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.681581847528923,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7308602999210734,
						"acc_stderr,none": 0.012464911951268736,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8644688644688645,
						"acc_stderr,none": 0.020754380015466267,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6256363636363637,
						"acc_stderr,none": 0.07173344559836263,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502843,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747598,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561317,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.02200091089387719,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.020186703693570843,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4378580990629183,
						"acc_stderr,none": 0.046141989397628855,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46184738955823296,
						"acc_stderr,none": 0.009992853579749966,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807717,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.42610441767068274,
						"acc_stderr,none": 0.009912016377459075,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5369477911646586,
						"acc_stderr,none": 0.009994672360002298,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.010003871419517729,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.01001552415662981,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43172690763052207,
						"acc_stderr,none": 0.009928203186112922,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409704,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840171,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314556,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811678,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810778,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41285140562248995,
						"acc_stderr,none": 0.00986866594308441,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806038,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6344383611094399,
						"acc_stderr,none": 0.060996520625444285,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5976174718729318,
						"acc_stderr,none": 0.012619516819528715,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7763070814030444,
						"acc_stderr,none": 0.010723941055690177,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7213765718067505,
						"acc_stderr,none": 0.011537224908075903,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801903,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6035737921906023,
						"acc_stderr,none": 0.012588033568434754,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6737260092653872,
						"acc_stderr,none": 0.012065474625979069,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5400397088021178,
						"acc_stderr,none": 0.01282580237008399,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6902713434811383,
						"acc_stderr,none": 0.011899045981288764,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716335,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6518861681005956,
						"acc_stderr,none": 0.012259084803727359,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8136659923578332,
						"acc_stderr,none": 0.038359101094717726,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8769892473118279,
						"acc_stderr,none": 0.0068131917265157995,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7309697601668405,
						"acc_stderr,none": 0.014327403771784453,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8098859315589354,
						"acc_stderr,none": 0.02424199792595853,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.02582169136035425,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.018824952299180426,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-A"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-B": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6417700112739572,
						"acc_norm,none": 0.6299323562570462,
						"acc_norm_stderr,none": 0.08276312799535909,
						"acc_stderr,none": 0.10579317717513657,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4996875,
						"acc_stderr,none": 0.05156320394263091,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.2373,
						"acc_stderr,none": 0.2445273693930433,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8252686567164179,
						"acc_stderr,none": 0.16004588039050055,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.26225854383358105,
						"acc_norm,none": 0.26225854383358105,
						"acc_norm_stderr,none": 0.11487203038139629,
						"acc_stderr,none": 0.11487203038139629,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2912277672250043,
						"acc_norm,none": 0.2912277672250043,
						"acc_norm_stderr,none": 0.05142497524329903,
						"acc_stderr,none": 0.05142497524329903,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6606850029815146,
						"likelihood_diff_stderr,none": 0.5518449174548645,
						"pct_stereotype,none": 0.6137447823494335,
						"pct_stereotype_stderr,none": 0.07106435284906429
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06742125984251969,
						"exact_match_stderr,none": 0.005563988522062476
					},
					"glue": {
						"acc,none": 0.7219509290138161,
						"acc_stderr,none": 0.0038517239690826193,
						"alias": "glue",
						"f1,none": 0.7129031094716921,
						"f1_stderr,none": 0.00010461379605454086,
						"mcc,none": 0.1789598310948066,
						"mcc_stderr,none": 0.028345902875722893
					},
					"kmmlu": {
						"acc,none": 0.25082298585041884,
						"acc_norm,none": 0.25082298585041884,
						"acc_norm_stderr,none": 0.02298340376283324,
						"acc_stderr,none": 0.02298340376283324,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5788204341153256,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.0638042893583508,
						"alias": "kobest",
						"f1,none": 0.5600668598924817,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.723850184358626,
						"acc_stderr,none": 0.017083572537623964,
						"alias": "lambada",
						"perplexity,none": 3.4380992442173275,
						"perplexity_stderr,none": 0.17573940491170567
					},
					"lambada_cloze": {
						"acc,none": 0.06588395109644866,
						"acc_stderr,none": 0.004486889276645239,
						"alias": "lambada_cloze",
						"perplexity,none": 280.56928848684856,
						"perplexity_stderr,none": 16.718657743261268
					},
					"lambada_multilingual": {
						"acc,none": 0.5515233844362508,
						"acc_stderr,none": 0.08421959613535354,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.399493897596383,
						"perplexity_stderr,none": 7.578457941293386
					},
					"mmlu": {
						"acc,none": 0.4203817120068366,
						"acc_stderr,none": 0.09419170871611754,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.40085015940488844,
						"acc_stderr,none": 0.10356667225531478,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.47473447055037016,
						"acc_stderr,none": 0.08028084119759857,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46798830029249266,
						"acc_stderr,none": 0.07680576830543599,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3495084046939423,
						"acc_stderr,none": 0.07570622960163205,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.39957416607523066,
						"acc_norm,none": 0.3764752691049997,
						"acc_norm_stderr,none": 0.00010913788810020364,
						"acc_stderr,none": 0.06262570773780589,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.04950308366918228,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7494161591545646,
						"acc_norm,none": 0.6341794517193212,
						"acc_norm_stderr,none": 0.00889804306834689,
						"acc_stderr,none": 0.15241228879380808,
						"alias": "pythia",
						"bits_per_byte,none": 0.6366215629081775,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.554684205012165,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1130732111761272,
						"perplexity_stderr,none": 0.06018854865655288,
						"word_perplexity,none": 10.58759040597816,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3829787234042553,
						"acc_norm,none": 0.43439716312056736,
						"acc_norm_stderr,none": 0.057893785323336576,
						"acc_stderr,none": 0.04294560586113103,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6689627633023859,
						"acc_stderr,none": 0.07478236276341386,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.33963924222890907,
						"acc_stderr,none": 0.0014597213029538807,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907832,
						"bleu_diff,none": -7.210349986291059,
						"bleu_diff_stderr,none": 0.8473025797244564,
						"bleu_max,none": 26.91417887164884,
						"bleu_max_stderr,none": 0.8065767641511207,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.317339793504233,
						"rouge1_diff_stderr,none": 0.9164890367831899,
						"rouge1_max,none": 52.24778120923728,
						"rouge1_max_stderr,none": 0.8645590833409083,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -10.825591276141028,
						"rouge2_diff_stderr,none": 1.1076795442544722,
						"rouge2_max,none": 36.578614921244075,
						"rouge2_max_stderr,none": 1.0147701724257248,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394812,
						"rougeL_diff,none": -9.603219847920986,
						"rougeL_diff_stderr,none": 0.9272381302328431,
						"rougeL_max,none": 49.18494814457406,
						"rougeL_max_stderr,none": 0.8853992254050823
					},
					"xcopa": {
						"acc,none": 0.6216363636363637,
						"acc_stderr,none": 0.07042376634102472,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4376974564926372,
						"acc_stderr,none": 0.045281056535284886,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6337765477408098,
						"acc_stderr,none": 0.05452322084586655,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.039279035099497324,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6417700112739572,
						"acc_norm,none": 0.6299323562570462,
						"acc_norm_stderr,none": 0.08276312799535909,
						"acc_stderr,none": 0.10579317717513657,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4996875,
						"acc_stderr,none": 0.05156320394263091,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.01545972195749338,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.015740004693383845,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4525,
						"acc_stderr,none": 0.01437446739050299,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4180887372013652,
						"acc_norm,none": 0.45563139931740615,
						"acc_norm_stderr,none": 0.01455374993930687,
						"acc_stderr,none": 0.014413988396996083,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.752104377104377,
						"acc_norm,none": 0.7159090909090909,
						"acc_norm_stderr,none": 0.009253921261885768,
						"acc_stderr,none": 0.00886016236146403,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.2373,
						"acc_stderr,none": 0.2445273693930433,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.2315,
						"acc_stderr,none": 0.00943389496375141,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.011056609982818337,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.093,
						"acc_stderr,none": 0.006495890878020451,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9615,
						"acc_stderr,none": 0.004303270159661528,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.031,
						"acc_stderr,none": 0.0038764692062175188,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.196,
						"acc_stderr,none": 0.008878705745087713,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0135,
						"acc_stderr,none": 0.0025811249685073067,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1385,
						"acc_stderr,none": 0.007725847484883472,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.011,
						"acc_stderr,none": 0.0023328568559933755,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.121,
						"acc_stderr,none": 0.007294251370190568,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004772234273318872,
						"acc_stderr,none": 0.0014357568013434105,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8252686567164179,
						"acc_stderr,none": 0.16004588039050055,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491127,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403633,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578028,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248116,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269193145,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.01248626873437014,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888926,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.0072129762946392395,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319424,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.00655881224140611,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318215,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163039,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919315,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697068,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611462,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474918,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665544,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.679,
						"acc_stderr,none": 0.014770821817934642,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087976,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557424,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408032,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178315,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592086,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731966,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307987,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.013860415257527911,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474922,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.00872852720607478,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240655,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499315,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286434,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.014758652303574883,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.015120172605483708,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801098,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.015813097547730987,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855766,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523717,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843998,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578185,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653874,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756983,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785146,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717588,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707396,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.0060491811505849384,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783252,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469308,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784383,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.015760691590136378,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665533,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319415,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.641,
						"acc_stderr,none": 0.01517726422479859,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.01126614068463217,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942326,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.326,
						"acc_stderr,none": 0.014830507204541047,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504403,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410041,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.391,
						"acc_stderr,none": 0.015438826294681782,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.402,
						"acc_stderr,none": 0.015512467135715071,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6825688073394496,
						"acc_stderr,none": 0.008141240022609394,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.6075533661740559,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.26225854383358105,
						"acc_norm,none": 0.26225854383358105,
						"acc_norm_stderr,none": 0.11487203038139629,
						"acc_stderr,none": 0.11487203038139629,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.1008316903303367,
						"acc_stderr,none": 0.1008316903303367,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2912277672250043,
						"acc_norm,none": 0.2912277672250043,
						"acc_norm_stderr,none": 0.05142497524329903,
						"acc_stderr,none": 0.05142497524329903,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.03260698244181308,
						"acc_stderr,none": 0.03260698244181308,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467766,
						"acc_stderr,none": 0.03980066246467766,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3014705882352941,
						"acc_norm,none": 0.3014705882352941,
						"acc_norm_stderr,none": 0.039495529298273935,
						"acc_stderr,none": 0.039495529298273935,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.29102167182662536,
						"acc_norm,none": 0.29102167182662536,
						"acc_norm_stderr,none": 0.02531344242805741,
						"acc_stderr,none": 0.02531344242805741,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399812,
						"acc_stderr,none": 0.03166009679399812,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3240223463687151,
						"acc_norm,none": 0.3240223463687151,
						"acc_norm_stderr,none": 0.03507871288800094,
						"acc_stderr,none": 0.03507871288800094,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036402,
						"acc_stderr,none": 0.027985699387036402,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.046216787599682646,
						"acc_stderr,none": 0.046216787599682646,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.045136767181683086,
						"acc_stderr,none": 0.045136767181683086,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.02659853762760147,
						"acc_stderr,none": 0.02659853762760147,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.032834720561085676,
						"acc_stderr,none": 0.032834720561085676,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.033773102522091945,
						"acc_stderr,none": 0.033773102522091945,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3270440251572327,
						"acc_norm,none": 0.3270440251572327,
						"acc_norm_stderr,none": 0.0373222564649312,
						"acc_stderr,none": 0.0373222564649312,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935573,
						"acc_stderr,none": 0.03714908409935573,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683864,
						"acc_stderr,none": 0.028145741115683864,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.26262626262626265,
						"acc_norm,none": 0.26262626262626265,
						"acc_norm_stderr,none": 0.031353050095330855,
						"acc_stderr,none": 0.031353050095330855,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698769,
						"acc_stderr,none": 0.03724517629698769,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662572,
						"acc_stderr,none": 0.04289122333662572,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810786,
						"acc_stderr,none": 0.03304756158810786,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.035490439822271735,
						"acc_stderr,none": 0.035490439822271735,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.03331120297324245,
						"acc_stderr,none": 0.03331120297324245,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.04084983733239223,
						"acc_stderr,none": 0.04084983733239223,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069252,
						"acc_stderr,none": 0.04075944659069252,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3380952380952381,
						"acc_norm,none": 0.3380952380952381,
						"acc_norm_stderr,none": 0.03272232371404439,
						"acc_stderr,none": 0.03272232371404439,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.034768900963930385,
						"acc_stderr,none": 0.034768900963930385,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.038783523721386215,
						"acc_stderr,none": 0.038783523721386215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.0344495265622902,
						"acc_stderr,none": 0.0344495265622902,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.022439412582786405,
						"acc_stderr,none": 0.022439412582786405,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.03098255553570088,
						"acc_stderr,none": 0.03098255553570088,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03568272877241247,
						"acc_stderr,none": 0.03568272877241247,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326118,
						"acc_stderr,none": 0.031061820840326118,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2670807453416149,
						"acc_norm,none": 0.2670807453416149,
						"acc_norm_stderr,none": 0.03497754822823695,
						"acc_stderr,none": 0.03497754822823695,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.036554511504337694,
						"acc_stderr,none": 0.036554511504337694,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1789598310948066,
						"mcc_stderr,none": 0.028345902875722893
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6606850029815146,
						"likelihood_diff_stderr,none": 0.5518449174548645,
						"pct_stereotype,none": 0.6137447823494335,
						"pct_stereotype_stderr,none": 0.07106435284906429
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.937984496124031,
						"likelihood_diff_stderr,none": 0.09277581202551341,
						"pct_stereotype,none": 0.6434108527131783,
						"pct_stereotype_stderr,none": 0.01170014501583026
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.027472527472527,
						"likelihood_diff_stderr,none": 0.4019222840643592,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831279
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.738636363636363,
						"likelihood_diff_stderr,none": 1.5431717766442612,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.088461538461538,
						"likelihood_diff_stderr,none": 0.6129960405337536,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.76484375,
						"likelihood_diff_stderr,none": 0.16488552960794906,
						"pct_stereotype,none": 0.61875,
						"pct_stereotype_stderr,none": 0.02719363040277548
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.8952546296296298,
						"likelihood_diff_stderr,none": 0.26245060283327387,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.557291666666667,
						"likelihood_diff_stderr,none": 0.38830197360608043,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.7886318897637796,
						"likelihood_diff_stderr,none": 0.166774219915271,
						"pct_stereotype,none": 0.5551181102362205,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.167792792792793,
						"likelihood_diff_stderr,none": 0.3851163958447136,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860919
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.329301075268817,
						"likelihood_diff_stderr,none": 0.48511318035324685,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364003
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.386842105263158,
						"likelihood_diff_stderr,none": 0.2468827997798868,
						"pct_stereotype,none": 0.6894736842105263,
						"pct_stereotype_stderr,none": 0.03365713545671698
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3801431127012522,
						"likelihood_diff_stderr,none": 0.07807657001278437,
						"pct_stereotype,none": 0.5837805605247466,
						"pct_stereotype_stderr,none": 0.012040623801379567
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.323611111111111,
						"likelihood_diff_stderr,none": 0.3038384179022945,
						"pct_stereotype,none": 0.6333333333333333,
						"pct_stereotype_stderr,none": 0.051080705280321645
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.826923076923077,
						"likelihood_diff_stderr,none": 0.882243196176208,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.174242424242424,
						"likelihood_diff_stderr,none": 0.49510923004787954,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.866043613707165,
						"likelihood_diff_stderr,none": 0.13667787802781617,
						"pct_stereotype,none": 0.6230529595015576,
						"pct_stereotype_stderr,none": 0.02709116375533661
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.6462450592885376,
						"likelihood_diff_stderr,none": 0.2054105700849584,
						"pct_stereotype,none": 0.4031620553359684,
						"pct_stereotype_stderr,none": 0.03090066088529185
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4097222222222223,
						"likelihood_diff_stderr,none": 0.38102571250824885,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.057003814617008604
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.1396739130434783,
						"likelihood_diff_stderr,none": 0.15827612898648918,
						"pct_stereotype,none": 0.49782608695652175,
						"pct_stereotype_stderr,none": 0.023337780813399874
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3065217391304347,
						"likelihood_diff_stderr,none": 0.27526876245997023,
						"pct_stereotype,none": 0.7217391304347827,
						"pct_stereotype_stderr,none": 0.04197239673902095
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.1923076923076925,
						"likelihood_diff_stderr,none": 0.32367125111992934,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355003
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.061224489795919,
						"likelihood_diff_stderr,none": 0.24752088728456084,
						"pct_stereotype,none": 0.7040816326530612,
						"pct_stereotype_stderr,none": 0.03268738384505799
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06742125984251969,
						"exact_match_stderr,none": 0.005563988522062476
					},
					"glue": {
						"acc,none": 0.7219509290138161,
						"acc_stderr,none": 0.0038517239690826193,
						"alias": "glue",
						"f1,none": 0.7129031094716921,
						"f1_stderr,none": 0.00010461379605454086,
						"mcc,none": 0.1789598310948066,
						"mcc_stderr,none": 0.028345902875722893
					},
					"hellaswag": {
						"acc,none": 0.5570603465445131,
						"acc_norm,none": 0.7464648476399124,
						"acc_norm_stderr,none": 0.0043414548418923265,
						"acc_stderr,none": 0.004957182635381801,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.25082298585041884,
						"acc_norm,none": 0.25082298585041884,
						"acc_norm_stderr,none": 0.02298340376283324,
						"acc_stderr,none": 0.02298340376283324,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.01377220656516854,
						"acc_stderr,none": 0.01377220656516854,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660009,
						"acc_stderr,none": 0.013394902889660009,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434937,
						"acc_stderr,none": 0.014221154708434937,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.017731561494907167,
						"acc_stderr,none": 0.017731561494907167,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660014,
						"acc_stderr,none": 0.013394902889660014,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.01452608023545955,
						"acc_stderr,none": 0.01452608023545955,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.01353152253451544,
						"acc_stderr,none": 0.01353152253451544,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.03048807329211421,
						"acc_stderr,none": 0.03048807329211421,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740668,
						"acc_stderr,none": 0.014142984975740668,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.16923076923076924,
						"acc_norm,none": 0.16923076923076924,
						"acc_norm_stderr,none": 0.03301300142947339,
						"acc_stderr,none": 0.03301300142947339,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.04461960433384741,
						"acc_stderr,none": 0.04461960433384741,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555961,
						"acc_stderr,none": 0.013550631705555961,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514516,
						"acc_stderr,none": 0.01397996564514516,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.01366318713487765,
						"acc_stderr,none": 0.01366318713487765,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314123,
						"acc_stderr,none": 0.013644675781314123,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220056,
						"acc_stderr,none": 0.013374972519220056,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145155,
						"acc_stderr,none": 0.013979965645145155,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651152,
						"acc_stderr,none": 0.013736254390651152,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750641,
						"acc_stderr,none": 0.013626065817750641,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.01419015011761203,
						"acc_stderr,none": 0.01419015011761203,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.01351231225892083,
						"acc_stderr,none": 0.01351231225892083,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763928,
						"acc_stderr,none": 0.013253174964763928,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.01355063170555596,
						"acc_stderr,none": 0.01355063170555596,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438683,
						"acc_stderr,none": 0.013434451402438683,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.01792210934401689,
						"acc_stderr,none": 0.01792210934401689,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577943,
						"acc_stderr,none": 0.013454070462577943,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021355,
						"acc_stderr,none": 0.013912208651021355,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937601,
						"acc_stderr,none": 0.013493000446937601,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.024459979523511415,
						"acc_stderr,none": 0.024459979523511415,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750636,
						"acc_stderr,none": 0.013626065817750636,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515434,
						"acc_stderr,none": 0.013531522534515434,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.217,
						"acc_norm,none": 0.217,
						"acc_norm_stderr,none": 0.01304151375727071,
						"acc_stderr,none": 0.01304151375727071,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259738,
						"acc_stderr,none": 0.013929286594259738,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.02861764926136019,
						"acc_stderr,none": 0.02861764926136019,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291352,
						"acc_stderr,none": 0.014236526215291352,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5788204341153256,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.0638042893583508,
						"alias": "kobest",
						"f1,none": 0.5600668598924817,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6232193732193733,
						"acc_stderr,none": 0.012937069737310833,
						"alias": " - kobest_boolq",
						"f1,none": 0.5852118099554839,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175113,
						"alias": " - kobest_copa",
						"f1,none": 0.6379046616013441,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.428,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.02228814759117695,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4238871846204747,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.7103274559193955,
						"acc_stderr,none": 0.02279477840297185,
						"alias": " - kobest_sentineg",
						"f1,none": 0.7044467317913163,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.014091497219388538,
						"alias": " - kobest_wic",
						"f1,none": 0.4788206034598223,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.723850184358626,
						"acc_stderr,none": 0.017083572537623964,
						"alias": "lambada",
						"perplexity,none": 3.4380992442173275,
						"perplexity_stderr,none": 0.17573940491170567
					},
					"lambada_cloze": {
						"acc,none": 0.06588395109644866,
						"acc_stderr,none": 0.004486889276645239,
						"alias": "lambada_cloze",
						"perplexity,none": 280.56928848684856,
						"perplexity_stderr,none": 16.718657743261268
					},
					"lambada_multilingual": {
						"acc,none": 0.5515233844362508,
						"acc_stderr,none": 0.08421959613535354,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.399493897596383,
						"perplexity_stderr,none": 7.578457941293386
					},
					"lambada_openai": {
						"acc,none": 0.7574228604696294,
						"acc_stderr,none": 0.005971813173819642,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1130732111761272,
						"perplexity_stderr,none": 0.06018854865655288
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.060159130603531924,
						"acc_stderr,none": 0.0033127602912079203,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 253.26224322105384,
						"perplexity_stderr,none": 9.083184903769897
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.43916165340578306,
						"acc_stderr,none": 0.006914218960391649,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 32.03570079387068,
						"perplexity_stderr,none": 1.7781556234407807
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7554822433533864,
						"acc_stderr,none": 0.005987967089937295,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.113720018113587,
						"perplexity_stderr,none": 0.060205226719520916
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.469241218707549,
						"acc_stderr,none": 0.006952784103387315,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 26.103187211935005,
						"perplexity_stderr,none": 1.2782254603231549
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5643314574034543,
						"acc_stderr,none": 0.0069080791377573265,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 15.196001423575828,
						"perplexity_stderr,none": 0.7367960447688336
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5294003493110809,
						"acc_stderr,none": 0.006953924718792949,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.54886004048683,
						"perplexity_stderr,none": 1.0893185401363403
					},
					"lambada_standard": {
						"acc,none": 0.6920240636522415,
						"acc_stderr,none": 0.006431778256505184,
						"alias": " - lambada_standard",
						"perplexity,none": 3.7621472038382966,
						"perplexity_stderr,none": 0.07499156305070413
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.07160877158936542,
						"acc_stderr,none": 0.00359220157489281,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 307.8763337526433,
						"perplexity_stderr,none": 10.181714531567936
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.32251908396946566,
						"exact_match_stderr,get-answer": 0.011793376115720528
					},
					"logiqa": {
						"acc,none": 0.250384024577573,
						"acc_norm,none": 0.2995391705069124,
						"acc_norm_stderr,none": 0.017966441188587944,
						"acc_stderr,none": 0.016992843055190027,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.26717557251908397,
						"acc_norm,none": 0.2875318066157761,
						"acc_norm_stderr,none": 0.011419250355256812,
						"acc_stderr,none": 0.011163753808132634,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2592964824120603,
						"acc_norm,none": 0.25862646566164155,
						"acc_norm_stderr,none": 0.008015961308376581,
						"acc_stderr,none": 0.00802271023810577,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3419826307985596,
						"acc_stderr,none": 0.004882156585093111,
						"alias": "mc_taco",
						"f1,none": 0.50710035700119,
						"f1_stderr,none": 0.00545012007646272
					},
					"medmcqa": {
						"acc,none": 0.3724599569686828,
						"acc_norm,none": 0.3724599569686828,
						"acc_norm_stderr,none": 0.007475986383828151,
						"acc_stderr,none": 0.007475986383828151,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.384917517674784,
						"acc_norm,none": 0.384917517674784,
						"acc_norm_stderr,none": 0.013642908352660028,
						"acc_stderr,none": 0.013642908352660028,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4203817120068366,
						"acc_stderr,none": 0.09419170871611754,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.45185185185185184,
						"acc_stderr,none": 0.04299268905480864,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3881578947368421,
						"acc_stderr,none": 0.03965842097512744,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4377358490566038,
						"acc_stderr,none": 0.030533338430467512,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4236111111111111,
						"acc_stderr,none": 0.0413212501972337,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.04533838195929775,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956911,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4085106382978723,
						"acc_stderr,none": 0.03213418026701576,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.04303684033537318,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.46206896551724136,
						"acc_stderr,none": 0.04154659671707546,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29894179894179895,
						"acc_stderr,none": 0.023577604791655795,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147127,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.43548387096774194,
						"acc_stderr,none": 0.028206225591502744,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3448275862068966,
						"acc_stderr,none": 0.03344283744280458,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.593939393939394,
						"acc_stderr,none": 0.03834816355401181,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.03547601494006938,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5751295336787565,
						"acc_stderr,none": 0.03567471335212541,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.37435897435897436,
						"acc_stderr,none": 0.0245375915728305,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.026962424325073835,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3739495798319328,
						"acc_stderr,none": 0.03142946637883708,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.03445406271987054,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5412844036697247,
						"acc_stderr,none": 0.021364122533881695,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791013,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.553921568627451,
						"acc_stderr,none": 0.034888454513049734,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6497890295358649,
						"acc_stderr,none": 0.03105239193758435,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4618834080717489,
						"acc_stderr,none": 0.03346015011973228,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5419847328244275,
						"acc_stderr,none": 0.04369802690578756,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.40085015940488844,
						"acc_stderr,none": 0.10356667225531478,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.47107438016528924,
						"acc_stderr,none": 0.04556710331269498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.04820403072760627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4233128834355828,
						"acc_stderr,none": 0.03881891213334383,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841043,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5339805825242718,
						"acc_stderr,none": 0.0493929144727348,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6495726495726496,
						"acc_stderr,none": 0.031256108244218817,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5925925925925926,
						"acc_stderr,none": 0.017570705239256555,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.42196531791907516,
						"acc_stderr,none": 0.02658923114217426,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103986,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.028491993586171566,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.47473447055037016,
						"acc_stderr,none": 0.08028084119759857,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4919614147909968,
						"acc_stderr,none": 0.028394421370984545,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.49382716049382713,
						"acc_stderr,none": 0.027818623962583295,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3262411347517731,
						"acc_stderr,none": 0.02796845304356317,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.33702737940026073,
						"acc_stderr,none": 0.01207283627369132,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.40441176470588236,
						"acc_stderr,none": 0.02981263070156974,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.42320261437908496,
						"acc_stderr,none": 0.019987809769482064,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.04769300568972742,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4122448979591837,
						"acc_stderr,none": 0.03151236044674281,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46798830029249266,
						"acc_stderr,none": 0.07680576830543599,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6616915422885572,
						"acc_stderr,none": 0.03345563070339191,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3495084046939423,
						"acc_stderr,none": 0.07570622960163205,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120575,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6549707602339181,
						"acc_stderr,none": 0.03645981377388807,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7679062659195109,
						"acc_stderr,none": 0.004261502884575368,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7685109845402767,
						"acc_stderr,none": 0.004253940249622769,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6985294117647058,
						"acc_stderr,none": 0.02274665905021724,
						"alias": "mrpc",
						"f1,none": 0.8188512518409425,
						"f1_stderr,none": 0.016029942720519597
					},
					"multimedqa": {
						"acc,none": 0.39957416607523066,
						"acc_norm,none": 0.3764752691049997,
						"acc_norm_stderr,none": 0.00010913788810020364,
						"acc_stderr,none": 0.06262570773780589,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5210396039603961,
						"acc_stderr,none": 0.007175442024099677,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7129420631693394,
						"mrr_stderr,none": 0.010309442349458057,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4040632054176072,
						"r@2_stderr,none": 0.016495030288906053
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6589541023862012,
						"mrr_stderr,none": 0.010402497433068175,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4717832957110609,
						"r@2_stderr,none": 0.016780531415161348
					},
					"openbookqa": {
						"acc,none": 0.31,
						"acc_norm,none": 0.426,
						"acc_norm_stderr,none": 0.022136577335085634,
						"acc_stderr,none": 0.020704041021724805,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4125,
						"acc_stderr,none": 0.01101056271248756,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.010833775211931941,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4275,
						"acc_stderr,none": 0.011064948781886606,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5415,
						"acc_stderr,none": 0.01114454913793036,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.01117991481396971,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5355,
						"acc_stderr,none": 0.011154913314119568,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.011166819105029986,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.04950308366918228,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.779107725788901,
						"acc_norm,none": 0.7818280739934712,
						"acc_norm_stderr,none": 0.009636081958374381,
						"acc_stderr,none": 0.00967908804884222,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.27177625960717333,
						"acc_norm,none": 0.29243168232280103,
						"acc_norm_stderr,none": 0.0033233000333465802,
						"acc_stderr,none": 0.0032502092833277874,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.02166071034720448,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7494161591545646,
						"acc_norm,none": 0.6341794517193212,
						"acc_norm_stderr,none": 0.00889804306834689,
						"acc_stderr,none": 0.15241228879380808,
						"alias": "pythia",
						"bits_per_byte,none": 0.6366215629081775,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.554684205012165,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1130732111761272,
						"perplexity_stderr,none": 0.06018854865655288,
						"word_perplexity,none": 10.58759040597816,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3829787234042553,
						"acc_norm,none": 0.43439716312056736,
						"acc_norm_stderr,none": 0.057893785323336576,
						"acc_stderr,none": 0.04294560586113103,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.425,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.04531634835874827,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.33125,
						"acc_norm,none": 0.43125,
						"acc_norm_stderr,none": 0.039275949840189193,
						"acc_stderr,none": 0.03732598513993524,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.3908450704225352,
						"acc_norm_stderr,none": 0.029005007569909824,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7270096463022508,
						"acc_stderr,none": 0.0022156300034517477,
						"alias": "qqp",
						"f1,none": 0.7120006262557733,
						"f1_stderr,none": 0.002630864226997886
					},
					"race": {
						"acc,none": 0.3569377990430622,
						"acc_stderr,none": 0.014827656367408903,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2486,
						"em_stderr,none": 0.004322229999342162,
						"f1,none": 0.2571119049757719,
						"f1_stderr,none": 0.004336517977630327
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.946,
						"acc_norm_stderr,none": 0.007150883521295433,
						"acc_stderr,none": 0.00655881224140611,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9185779816513762,
						"acc_stderr,none": 0.0092665883328367,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5853244026791963,
						"acc_norm,none": 0.7762671198640407,
						"acc_norm_stderr,none": 0.0029464645782225863,
						"acc_stderr,none": 0.00348323902091921,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6689627633023859,
						"acc_stderr,none": 0.07478236276341386,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5850360576923077,
						"acc_stderr,none": 0.004931351526367552,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8657139961487788,
						"acc_stderr,none": 0.0034326698768401536,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5607843137254902,
						"acc_stderr,none": 0.004914259136189974,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.33963924222890907,
						"acc_stderr,none": 0.0014597213029538807,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907832,
						"bleu_diff,none": -7.210349986291059,
						"bleu_diff_stderr,none": 0.8473025797244564,
						"bleu_max,none": 26.91417887164884,
						"bleu_max_stderr,none": 0.8065767641511207,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.317339793504233,
						"rouge1_diff_stderr,none": 0.9164890367831899,
						"rouge1_max,none": 52.24778120923728,
						"rouge1_max_stderr,none": 0.8645590833409083,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -10.825591276141028,
						"rouge2_diff_stderr,none": 1.1076795442544722,
						"rouge2_max,none": 36.578614921244075,
						"rouge2_max_stderr,none": 1.0147701724257248,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394812,
						"rougeL_diff,none": -9.603219847920986,
						"rougeL_diff_stderr,none": 0.9272381302328431,
						"rougeL_max,none": 49.18494814457406,
						"rougeL_max_stderr,none": 0.8853992254050823
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907832,
						"bleu_diff,none": -7.210349986291059,
						"bleu_diff_stderr,none": 0.8473025797244564,
						"bleu_max,none": 26.91417887164884,
						"bleu_max_stderr,none": 0.8065767641511207,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.317339793504233,
						"rouge1_diff_stderr,none": 0.9164890367831899,
						"rouge1_max,none": 52.24778120923728,
						"rouge1_max_stderr,none": 0.8645590833409083,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -10.825591276141028,
						"rouge2_diff_stderr,none": 1.1076795442544722,
						"rouge2_max,none": 36.578614921244075,
						"rouge2_max_stderr,none": 1.0147701724257248,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394812,
						"rougeL_diff,none": -9.603219847920986,
						"rougeL_diff_stderr,none": 0.9272381302328431,
						"rougeL_max,none": 49.18494814457406,
						"rougeL_max_stderr,none": 0.8853992254050823
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2692778457772338,
						"acc_stderr,none": 0.015528566637087274,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4100006386805844,
						"acc_stderr,none": 0.014202060054095828,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.06742125984251969,
						"exact_match_stderr,none": 0.005563988522062476
					},
					"wic": {
						"acc,none": 0.5219435736677116,
						"acc_stderr,none": 0.019791633564310452,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6366215629081775,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.554684205012165,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.58759040597816,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7363851617995264,
						"acc_stderr,none": 0.012382849299658464,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8644688644688645,
						"acc_stderr,none": 0.020754380015466267,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6216363636363637,
						"acc_stderr,none": 0.07042376634102472,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.02059164957122493,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.01963596552972551,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561317,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.0213237286328075,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.02031317923174518,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.02043534209189614,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4376974564926372,
						"acc_stderr,none": 0.045281056535284886,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463623,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47269076305220886,
						"acc_stderr,none": 0.010007112889731974,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.0100202105584383,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41204819277108434,
						"acc_stderr,none": 0.009865802639096744,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5329317269076306,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4642570281124498,
						"acc_stderr,none": 0.009996432468510362,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4738955823293173,
						"acc_stderr,none": 0.010008404651660658,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4321285140562249,
						"acc_stderr,none": 0.009929309430958672,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4815261044176707,
						"acc_stderr,none": 0.010015229768356986,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391931,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42650602409638555,
						"acc_stderr,none": 0.009913215943570534,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44899598393574297,
						"acc_stderr,none": 0.00996979347724083,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.009878474341822933,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810775,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495745,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6337765477408098,
						"acc_stderr,none": 0.05452322084586655,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.01263942942038987,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7736598279285242,
						"acc_stderr,none": 0.01076880147235908,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7227001985440106,
						"acc_stderr,none": 0.011520342548268453,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801905,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133263,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6757114493712773,
						"acc_stderr,none": 0.012046419229995331,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126671,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6902713434811383,
						"acc_stderr,none": 0.011899045981288763,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730208,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6439444076770351,
						"acc_stderr,none": 0.01232238063722049,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.039279035099497324,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8808602150537634,
						"acc_stderr,none": 0.006719915957605396,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7288842544316997,
						"acc_stderr,none": 0.014362296895048155,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.025899880794833654,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.01841746802413971,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-B"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-C": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6426155580608793,
						"acc_norm,none": 0.6214768883878241,
						"acc_norm_stderr,none": 0.08002457149767404,
						"acc_stderr,none": 0.10539048680102525,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.511875,
						"acc_stderr,none": 0.05413581523906792,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.17805,
						"acc_stderr,none": 0.23338414305592642,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8297910447761194,
						"acc_stderr,none": 0.15417513305795172,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2630014858841011,
						"acc_norm,none": 0.2630014858841011,
						"acc_norm_stderr,none": 0.1136318857703767,
						"acc_stderr,none": 0.1136318857703767,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.29027801761353833,
						"acc_norm,none": 0.29027801761353833,
						"acc_norm_stderr,none": 0.0494352232750777,
						"acc_stderr,none": 0.0494352232750777,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6963793231961835,
						"likelihood_diff_stderr,none": 0.5557265725941885,
						"pct_stereotype,none": 0.6167262969588551,
						"pct_stereotype_stderr,none": 0.07115478371445777
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05019685039370079,
						"exact_match_stderr,none": 0.004845070213000883
					},
					"glue": {
						"acc,none": 0.7353203906622201,
						"acc_stderr,none": 0.004199638430116758,
						"alias": "glue",
						"f1,none": 0.7188939898342165,
						"f1_stderr,none": 9.454532037259184e-05,
						"mcc,none": 0.20990893922617596,
						"mcc_stderr,none": 0.03322694957306379
					},
					"kmmlu": {
						"acc,none": 0.2734911926075657,
						"acc_norm,none": 0.2734911926075657,
						"acc_norm_stderr,none": 0.026985357235352593,
						"acc_stderr,none": 0.026985357235352593,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.575093181319886,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.0004947174348697381,
						"acc_stderr,none": 0.0663161325908527,
						"alias": "kobest",
						"f1,none": 0.5434106181597453,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7261789248981176,
						"acc_stderr,none": 0.01502144414894413,
						"alias": "lambada",
						"perplexity,none": 3.3934759378729287,
						"perplexity_stderr,none": 0.1584231463262561
					},
					"lambada_cloze": {
						"acc,none": 0.07451969726372987,
						"acc_stderr,none": 0.007205737235713763,
						"alias": "lambada_cloze",
						"perplexity,none": 195.3136477899709,
						"perplexity_stderr,none": 6.773603281318504
					},
					"lambada_multilingual": {
						"acc,none": 0.5529206287599456,
						"acc_stderr,none": 0.082913225922017,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.947791299404624,
						"perplexity_stderr,none": 7.397634193638584
					},
					"mmlu": {
						"acc,none": 0.43740207947585813,
						"acc_stderr,none": 0.0981320452427674,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.4055260361317747,
						"acc_stderr,none": 0.10449544307428435,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4943675571290633,
						"acc_stderr,none": 0.0863296496110634,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4991875203119922,
						"acc_stderr,none": 0.0846095968025933,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3685379004123057,
						"acc_stderr,none": 0.07572748993106576,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4085166784953868,
						"acc_norm,none": 0.3811215723881497,
						"acc_norm_stderr,none": 0.0001163665346475683,
						"acc_stderr,none": 0.05936169842861073,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4755714285714286,
						"acc_stderr,none": 0.05750020234947709,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7546070530463469,
						"acc_norm,none": 0.626075904569061,
						"acc_norm_stderr,none": 0.00847506824884468,
						"acc_stderr,none": 0.1484193150056166,
						"alias": "pythia",
						"bits_per_byte,none": 0.6378579730841933,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5560171625953843,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1059236203378835,
						"perplexity_stderr,none": 0.06002017421445837,
						"word_perplexity,none": 10.63622300495702,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.37943262411347517,
						"acc_norm,none": 0.42907801418439717,
						"acc_norm_stderr,none": 0.05156469126629902,
						"acc_stderr,none": 0.048465217230203445,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6628398389404678,
						"acc_stderr,none": 0.07742939856360101,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34398610829929605,
						"acc_stderr,none": 0.0014426392096260243,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3378212974296206,
						"bleu_acc_stderr,none": 0.016557167322516875,
						"bleu_diff,none": -6.188108758444612,
						"bleu_diff_stderr,none": 0.8494358047626563,
						"bleu_max,none": 26.037006248282257,
						"bleu_max_stderr,none": 0.7867910588330527,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.016150201321323016,
						"rouge1_diff,none": -7.792478728141166,
						"rouge1_diff_stderr,none": 0.9207652003487916,
						"rouge1_max,none": 51.75737306585073,
						"rouge1_max_stderr,none": 0.850037983488124,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.015594753632006533,
						"rouge2_diff,none": -9.401609276652774,
						"rouge2_diff_stderr,none": 1.1190413430569957,
						"rouge2_max,none": 35.75423769869886,
						"rouge2_max_stderr,none": 0.999547150382563,
						"rougeL_acc,none": 0.30599755201958384,
						"rougeL_acc_stderr,none": 0.016132229728155048,
						"rougeL_diff,none": -8.141165872399707,
						"rougeL_diff_stderr,none": 0.9399019251251466,
						"rougeL_max,none": 48.74607552925405,
						"rougeL_max_stderr,none": 0.867035965150387
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07099005621458672,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43890227576974566,
						"acc_stderr,none": 0.046761630567835805,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6328740749654052,
						"acc_stderr,none": 0.06137171904240014,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8161384580804675,
						"acc_stderr,none": 0.0375173364281453,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6426155580608793,
						"acc_norm,none": 0.6214768883878241,
						"acc_norm_stderr,none": 0.08002457149767404,
						"acc_stderr,none": 0.10539048680102525,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.511875,
						"acc_stderr,none": 0.05413581523906792,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.01532510550889813,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.015768596914394382,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.46166666666666667,
						"acc_stderr,none": 0.01439727512084776,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4197952218430034,
						"acc_norm,none": 0.45307167235494883,
						"acc_norm_stderr,none": 0.014546892052005631,
						"acc_stderr,none": 0.014422181226303026,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7525252525252525,
						"acc_norm,none": 0.7045454545454546,
						"acc_norm_stderr,none": 0.009361987126556455,
						"acc_stderr,none": 0.00885511441483471,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.17805,
						"acc_stderr,none": 0.23338414305592642,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.1405,
						"acc_stderr,none": 0.007772392169726285,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.01069575614904348,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.065,
						"acc_stderr,none": 0.005513864466114145,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9345,
						"acc_stderr,none": 0.005533550857500552,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.0019924821184884632,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1645,
						"acc_stderr,none": 0.008291818384773239,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.004,
						"acc_stderr,none": 0.0014117352790976921,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.061,
						"acc_stderr,none": 0.005352926948264492,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0525,
						"acc_stderr,none": 0.004988418302285793,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0034707158351409977,
						"acc_stderr,none": 0.0012252178743912103,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8297910447761194,
						"acc_stderr,none": 0.15417513305795172,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767615,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578028,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621235,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792963,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707347,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968764,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380715,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.00838416926679639,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998864,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.0066278147173807036,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584939,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557428,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666672,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274701,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.00757207609155742,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357798,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928364,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535246,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.00745483565040673,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.563,
						"acc_stderr,none": 0.015693223928730377,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400229,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308495,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01407885699246261,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280307,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486645,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524406,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220063,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.00721297629463923,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.015120172605483694,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.014899597242811494,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.615,
						"acc_stderr,none": 0.015395194445410806,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031307,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.015654426245029274,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408052,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240655,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844004,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578185,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783226,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.0049395748196984605,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525047,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145158,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175322,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118759,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783238,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469308,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.015473313265859406,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.01567232023733621,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.01180043432464459,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178326,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444235,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746835,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651521,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.294,
						"acc_stderr,none": 0.014414290540008215,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183597,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103312,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341681,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081361,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315113,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.415,
						"acc_stderr,none": 0.015589035185604632,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.426,
						"acc_stderr,none": 0.01564508768811381,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6862385321100918,
						"acc_stderr,none": 0.008115773046958274,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.7014773484928237,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2630014858841011,
						"acc_norm,none": 0.2630014858841011,
						"acc_norm_stderr,none": 0.1136318857703767,
						"acc_stderr,none": 0.1136318857703767,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206316,
						"acc_stderr,none": 0.062069005411206316,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.06595297051445341,
						"acc_stderr,none": 0.06595297051445341,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217939,
						"acc_stderr,none": 0.07988892740217939,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387369,
						"acc_stderr,none": 0.11236664374387369,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.38636363636363635,
						"acc_norm,none": 0.38636363636363635,
						"acc_norm_stderr,none": 0.07425392901036847,
						"acc_stderr,none": 0.07425392901036847,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.29027801761353833,
						"acc_norm,none": 0.29027801761353833,
						"acc_norm_stderr,none": 0.0494352232750777,
						"acc_stderr,none": 0.0494352232750777,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3253588516746411,
						"acc_norm,none": 0.3253588516746411,
						"acc_norm_stderr,none": 0.03248523846063361,
						"acc_stderr,none": 0.03248523846063361,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467766,
						"acc_stderr,none": 0.03980066246467766,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2867647058823529,
						"acc_norm,none": 0.2867647058823529,
						"acc_norm_stderr,none": 0.038923544178637824,
						"acc_stderr,none": 0.038923544178637824,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.29102167182662536,
						"acc_norm,none": 0.29102167182662536,
						"acc_norm_stderr,none": 0.02531344242805741,
						"acc_stderr,none": 0.02531344242805741,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.33519553072625696,
						"acc_norm,none": 0.33519553072625696,
						"acc_norm_stderr,none": 0.035382301081428424,
						"acc_stderr,none": 0.035382301081428424,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036402,
						"acc_stderr,none": 0.027985699387036402,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.04722013080771233,
						"acc_stderr,none": 0.04722013080771233,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.04384295586918883,
						"acc_stderr,none": 0.04384295586918883,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.02659853762760147,
						"acc_stderr,none": 0.02659853762760147,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.032702871814820816,
						"acc_stderr,none": 0.032702871814820816,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.34355828220858897,
						"acc_norm,none": 0.34355828220858897,
						"acc_norm_stderr,none": 0.03731133519673893,
						"acc_stderr,none": 0.03731133519673893,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250621,
						"acc_stderr,none": 0.03451628876250621,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42436974789915966,
						"acc_norm,none": 0.42436974789915966,
						"acc_norm_stderr,none": 0.032104790510157764,
						"acc_stderr,none": 0.032104790510157764,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949517,
						"acc_stderr,none": 0.029322764228949517,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662572,
						"acc_stderr,none": 0.04289122333662572,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810786,
						"acc_stderr,none": 0.03304756158810786,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.040735243221471255,
						"acc_stderr,none": 0.040735243221471255,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.0351209126342837,
						"acc_stderr,none": 0.0351209126342837,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745193,
						"acc_stderr,none": 0.021801329069745193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.37850467289719625,
						"acc_norm,none": 0.37850467289719625,
						"acc_norm_stderr,none": 0.033232633255714746,
						"acc_stderr,none": 0.033232633255714746,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.04084983733239223,
						"acc_stderr,none": 0.04084983733239223,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069252,
						"acc_stderr,none": 0.04075944659069252,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630123,
						"acc_stderr,none": 0.03260773253630123,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.044729159560441434,
						"acc_stderr,none": 0.044729159560441434,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0312732353098133,
						"acc_stderr,none": 0.0312732353098133,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.0351734690130024,
						"acc_stderr,none": 0.0351734690130024,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3053097345132743,
						"acc_norm,none": 0.3053097345132743,
						"acc_norm_stderr,none": 0.03070256598213893,
						"acc_stderr,none": 0.03070256598213893,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.20990893922617596,
						"mcc_stderr,none": 0.03322694957306379
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6963793231961835,
						"likelihood_diff_stderr,none": 0.5557265725941885,
						"pct_stereotype,none": 0.6167262969588551,
						"pct_stereotype_stderr,none": 0.07115478371445777
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.991428145497913,
						"likelihood_diff_stderr,none": 0.0937618993275015,
						"pct_stereotype,none": 0.6487775790101371,
						"pct_stereotype_stderr,none": 0.011660093294940081
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.157967032967033,
						"likelihood_diff_stderr,none": 0.3989303614934034,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831279
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.590909090909091,
						"likelihood_diff_stderr,none": 1.622733638897622,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.119230769230769,
						"likelihood_diff_stderr,none": 0.6233899379945906,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.82265625,
						"likelihood_diff_stderr,none": 0.168117996590784,
						"pct_stereotype,none": 0.621875,
						"pct_stereotype_stderr,none": 0.02715025441234715
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.966435185185185,
						"likelihood_diff_stderr,none": 0.2634633957864098,
						"pct_stereotype,none": 0.6064814814814815,
						"pct_stereotype_stderr,none": 0.03331747876370312
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.567708333333333,
						"likelihood_diff_stderr,none": 0.38879756422309775,
						"pct_stereotype,none": 0.7916666666666666,
						"pct_stereotype_stderr,none": 0.04819715314419525
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.8503937007874014,
						"likelihood_diff_stderr,none": 0.1683865997323872,
						"pct_stereotype,none": 0.5570866141732284,
						"pct_stereotype_stderr,none": 0.022060572810922933
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.195945945945946,
						"likelihood_diff_stderr,none": 0.39160160391608967,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860919
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.356182795698925,
						"likelihood_diff_stderr,none": 0.5067729755507252,
						"pct_stereotype,none": 0.8817204301075269,
						"pct_stereotype_stderr,none": 0.033668704543479845
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.405263157894737,
						"likelihood_diff_stderr,none": 0.24641228143781552,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333337
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.4040324985092427,
						"likelihood_diff_stderr,none": 0.07939435006099754,
						"pct_stereotype,none": 0.5849731663685152,
						"pct_stereotype_stderr,none": 0.012035636251338341
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3333333333333335,
						"likelihood_diff_stderr,none": 0.3111283881416408,
						"pct_stereotype,none": 0.6333333333333333,
						"pct_stereotype_stderr,none": 0.051080705280321645
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.1826923076923075,
						"likelihood_diff_stderr,none": 0.9140349077706358,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.196969696969697,
						"likelihood_diff_stderr,none": 0.5070170532571515,
						"pct_stereotype,none": 0.7575757575757576,
						"pct_stereotype_stderr,none": 0.05315503147315326
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8738317757009346,
						"likelihood_diff_stderr,none": 0.14124020197718132,
						"pct_stereotype,none": 0.616822429906542,
						"pct_stereotype_stderr,none": 0.027177226212327755
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.7040513833992095,
						"likelihood_diff_stderr,none": 0.20854427387354355,
						"pct_stereotype,none": 0.4150197628458498,
						"pct_stereotype_stderr,none": 0.031038785215783234
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4479166666666665,
						"likelihood_diff_stderr,none": 0.3934536667158064,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.210054347826087,
						"likelihood_diff_stderr,none": 0.1599672151282611,
						"pct_stereotype,none": 0.49130434782608695,
						"pct_stereotype_stderr,none": 0.023334471757161752
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.121739130434783,
						"likelihood_diff_stderr,none": 0.27851089429971,
						"pct_stereotype,none": 0.6869565217391305,
						"pct_stereotype_stderr,none": 0.04343247016610823
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.151098901098901,
						"likelihood_diff_stderr,none": 0.3139482101332897,
						"pct_stereotype,none": 0.7472527472527473,
						"pct_stereotype_stderr,none": 0.0458095185373289
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.000637755102041,
						"likelihood_diff_stderr,none": 0.2516876212567745,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.032521566079698076
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05019685039370079,
						"exact_match_stderr,none": 0.004845070213000883
					},
					"glue": {
						"acc,none": 0.7353203906622201,
						"acc_stderr,none": 0.004199638430116758,
						"alias": "glue",
						"f1,none": 0.7188939898342165,
						"f1_stderr,none": 9.454532037259184e-05,
						"mcc,none": 0.20990893922617596,
						"mcc_stderr,none": 0.03322694957306379
					},
					"hellaswag": {
						"acc,none": 0.5598486357299343,
						"acc_norm,none": 0.7491535550687114,
						"acc_norm_stderr,none": 0.004326143430360104,
						"acc_stderr,none": 0.004953907062096603,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2734911926075657,
						"acc_norm,none": 0.2734911926075657,
						"acc_norm_stderr,none": 0.026985357235352593,
						"acc_stderr,none": 0.026985357235352593,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.01419015011761203,
						"acc_stderr,none": 0.01419015011761203,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291347,
						"acc_stderr,none": 0.014236526215291347,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920845,
						"acc_stderr,none": 0.013512312258920845,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377937,
						"acc_stderr,none": 0.014370995982377937,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.01780880651013787,
						"acc_stderr,none": 0.01780880651013787,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717577,
						"acc_stderr,none": 0.014095022868717577,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.344,
						"acc_norm,none": 0.344,
						"acc_norm_stderr,none": 0.015029633724408943,
						"acc_stderr,none": 0.015029633724408943,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664384,
						"acc_stderr,none": 0.014566646394664384,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.036194359366126624,
						"acc_stderr,none": 0.036194359366126624,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.04461960433384741,
						"acc_stderr,none": 0.04461960433384741,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729494,
						"acc_stderr,none": 0.014013292702729494,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445514,
						"acc_stderr,none": 0.014428554438445514,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633916,
						"acc_stderr,none": 0.014046255632633916,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633915,
						"acc_stderr,none": 0.014046255632633915,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462615,
						"acc_stderr,none": 0.014078856992462615,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296188,
						"acc_stderr,none": 0.014341711358296188,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.01428212095520048,
						"acc_stderr,none": 0.01428212095520048,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612033,
						"acc_stderr,none": 0.014190150117612033,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.313,
						"acc_norm,none": 0.313,
						"acc_norm_stderr,none": 0.014671272822977886,
						"acc_stderr,none": 0.014671272822977886,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717581,
						"acc_stderr,none": 0.014095022868717581,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920836,
						"acc_stderr,none": 0.013512312258920836,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259734,
						"acc_stderr,none": 0.013929286594259734,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515445,
						"acc_stderr,none": 0.013531522534515445,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.0182782346012209,
						"acc_stderr,none": 0.0182782346012209,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515445,
						"acc_stderr,none": 0.013531522534515445,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633908,
						"acc_stderr,none": 0.014046255632633908,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717586,
						"acc_stderr,none": 0.014095022868717586,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.024929480622100746,
						"acc_stderr,none": 0.024929480622100746,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750645,
						"acc_stderr,none": 0.013626065817750645,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231561,
						"acc_stderr,none": 0.014326941797231561,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702293,
						"acc_stderr,none": 0.013681600278702293,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568193,
						"acc_stderr,none": 0.014029819522568193,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.029601626330440615,
						"acc_stderr,none": 0.029601626330440615,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.01480686473373886,
						"acc_stderr,none": 0.01480686473373886,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.575093181319886,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.0004947174348697381,
						"acc_stderr,none": 0.0663161325908527,
						"alias": "kobest",
						"f1,none": 0.5434106181597453,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6203703703703703,
						"acc_stderr,none": 0.012956173009687336,
						"alias": " - kobest_boolq",
						"f1,none": 0.5816773222679801,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264891,
						"alias": " - kobest_copa",
						"f1,none": 0.634191929973311,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.432,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.022242244375731017,
						"acc_stderr,none": 0.02217510926561316,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42853444015954356,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.7229219143576826,
						"acc_stderr,none": 0.022490504174607705,
						"alias": " - kobest_sentineg",
						"f1,none": 0.7166640714211565,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4873015873015873,
						"acc_stderr,none": 0.014086951987375836,
						"alias": " - kobest_wic",
						"f1,none": 0.4197191131389463,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7261789248981176,
						"acc_stderr,none": 0.01502144414894413,
						"alias": "lambada",
						"perplexity,none": 3.3934759378729287,
						"perplexity_stderr,none": 0.1584231463262561
					},
					"lambada_cloze": {
						"acc,none": 0.07451969726372987,
						"acc_stderr,none": 0.007205737235713763,
						"alias": "lambada_cloze",
						"perplexity,none": 195.3136477899709,
						"perplexity_stderr,none": 6.773603281318504
					},
					"lambada_multilingual": {
						"acc,none": 0.5529206287599456,
						"acc_stderr,none": 0.082913225922017,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.947791299404624,
						"perplexity_stderr,none": 7.397634193638584
					},
					"lambada_openai": {
						"acc,none": 0.7516010091209004,
						"acc_stderr,none": 0.006019780609042877,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1059236203378835,
						"perplexity_stderr,none": 0.06002017421445837
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.06209974771977489,
						"acc_stderr,none": 0.003362291146995512,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 190.65014561635348,
						"perplexity_stderr,none": 6.4198297753183216
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.43993790025228025,
						"acc_stderr,none": 0.006915536116983778,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31.534423250580737,
						"perplexity_stderr,none": 1.7461458452500793
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7533475645255191,
						"acc_stderr,none": 0.006005545631215157,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.105395328920996,
						"perplexity_stderr,none": 0.059984307995598965
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.47603337861439937,
						"acc_stderr,none": 0.006957970554902597,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.013236872221967,
						"perplexity_stderr,none": 1.2244474662898577
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5664661362313216,
						"acc_stderr,none": 0.006904155467557467,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 14.993321676305035,
						"perplexity_stderr,none": 0.72537802975351
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5288181641762081,
						"acc_stderr,none": 0.006954397730205825,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.09257936899438,
						"perplexity_stderr,none": 1.0617615965875753
					},
					"lambada_standard": {
						"acc,none": 0.6988162235590918,
						"acc_stderr,none": 0.0063915964889334305,
						"alias": " - lambada_standard",
						"perplexity,none": 3.681119378893935,
						"perplexity_stderr,none": 0.07226902332757962
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.08693964680768484,
						"acc_stderr,none": 0.003925280991955644,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 199.97714996358832,
						"perplexity_stderr,none": 6.299353710361064
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3346055979643766,
						"exact_match_stderr,get-answer": 0.011904689707452723
					},
					"logiqa": {
						"acc,none": 0.24423963133640553,
						"acc_norm,none": 0.30568356374807987,
						"acc_norm_stderr,none": 0.01806999734376347,
						"acc_stderr,none": 0.016851689430077556,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2608142493638677,
						"acc_norm,none": 0.28498727735368956,
						"acc_norm_stderr,none": 0.011388893410930606,
						"acc_stderr,none": 0.011077821377656304,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2562814070351759,
						"acc_norm,none": 0.2592964824120603,
						"acc_norm_stderr,none": 0.008022710238105768,
						"acc_stderr,none": 0.00799214693821701,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3543740732895573,
						"acc_stderr,none": 0.0049228029544205396,
						"alias": "mc_taco",
						"f1,none": 0.5041483650561249,
						"f1_stderr,none": 0.0055261524754716155
					},
					"medmcqa": {
						"acc,none": 0.37317714558929,
						"acc_norm,none": 0.37317714558929,
						"acc_norm_stderr,none": 0.007478903272890868,
						"acc_stderr,none": 0.007478903272890868,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.394344069128044,
						"acc_norm,none": 0.394344069128044,
						"acc_norm_stderr,none": 0.013702729616964808,
						"acc_stderr,none": 0.013702729616964808,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.43740207947585813,
						"acc_stderr,none": 0.0981320452427674,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.043097329010363554,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4641509433962264,
						"acc_stderr,none": 0.030693675018458003,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4861111111111111,
						"acc_stderr,none": 0.041795966175810016,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.03758517775404947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.39148936170212767,
						"acc_stderr,none": 0.031907012423268113,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.043036840335373173,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4689655172413793,
						"acc_stderr,none": 0.04158632762097828,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.023919984164047736,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.042407993275749234,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117317,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4645161290322581,
						"acc_stderr,none": 0.028372287797962956,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3645320197044335,
						"acc_stderr,none": 0.0338640574606209,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562429,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5818181818181818,
						"acc_stderr,none": 0.03851716319398393,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.47474747474747475,
						"acc_stderr,none": 0.03557806245087314,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6062176165803109,
						"acc_stderr,none": 0.03526077095548241,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4128205128205128,
						"acc_stderr,none": 0.024962683564331796,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3074074074074074,
						"acc_stderr,none": 0.028133252578815632,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.37815126050420167,
						"acc_stderr,none": 0.031499305777849054,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.03658603262763743,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5871559633027523,
						"acc_stderr,none": 0.021109128133413917,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.030225226160012404,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5784313725490197,
						"acc_stderr,none": 0.03465868196380762,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6413502109704642,
						"acc_stderr,none": 0.031219569445301854,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4618834080717489,
						"acc_stderr,none": 0.03346015011973228,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5725190839694656,
						"acc_stderr,none": 0.04338920305792401,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.4055260361317747,
						"acc_stderr,none": 0.10449544307428435,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.47107438016528924,
						"acc_stderr,none": 0.04556710331269498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.04820403072760627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4294478527607362,
						"acc_stderr,none": 0.03889066619112722,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.0432704093257873,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6019417475728155,
						"acc_stderr,none": 0.04846748253977239,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6794871794871795,
						"acc_stderr,none": 0.030572811310299618,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6015325670498084,
						"acc_stderr,none": 0.01750743860277741,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.026538189104705488,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22793296089385476,
						"acc_stderr,none": 0.014030149950805097,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4934640522875817,
						"acc_stderr,none": 0.028627470550556054,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4943675571290633,
						"acc_stderr,none": 0.0863296496110634,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5209003215434084,
						"acc_stderr,none": 0.028373270961069414,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.02782074420373286,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.32269503546099293,
						"acc_stderr,none": 0.027889139300534792,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.34615384615384615,
						"acc_stderr,none": 0.012150699768228555,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4485294117647059,
						"acc_stderr,none": 0.030211479609121603,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.45588235294117646,
						"acc_stderr,none": 0.020148939420415738,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4489795918367347,
						"acc_stderr,none": 0.03184213866687579,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4991875203119922,
						"acc_stderr,none": 0.0846095968025933,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6965174129353234,
						"acc_stderr,none": 0.03251006816458619,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3685379004123057,
						"acc_stderr,none": 0.07572748993106576,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120575,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6491228070175439,
						"acc_stderr,none": 0.036602988340491624,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7777890983188996,
						"acc_stderr,none": 0.004196532393576892,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7771562245728234,
						"acc_stderr,none": 0.004197160438943728,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7034313725490197,
						"acc_stderr,none": 0.022639991831486735,
						"alias": "mrpc",
						"f1,none": 0.8202080237741456,
						"f1_stderr,none": 0.016067921323912788
					},
					"multimedqa": {
						"acc,none": 0.4085166784953868,
						"acc_norm,none": 0.3811215723881497,
						"acc_norm_stderr,none": 0.0001163665346475683,
						"acc_stderr,none": 0.05936169842861073,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5468234323432343,
						"acc_stderr,none": 0.0071502425030918896,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7123777291171975,
						"mrr_stderr,none": 0.010327734066503792,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4006772009029345,
						"r@2_stderr,none": 0.01647236966063944
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6590481580783767,
						"mrr_stderr,none": 0.010421714024453141,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4729119638826185,
						"r@2_stderr,none": 0.01678263288163964
					},
					"openbookqa": {
						"acc,none": 0.314,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.020776701920308997,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404662,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.010856285251628973,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.010992197878818584,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003715,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5335,
						"acc_stderr,none": 0.011158007239770807,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.01116092102288328,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5265,
						"acc_stderr,none": 0.011167418260963935,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4755714285714286,
						"acc_stderr,none": 0.05750020234947709,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7742110990206746,
						"acc_norm,none": 0.7850924918389554,
						"acc_norm_stderr,none": 0.009583665082653306,
						"acc_stderr,none": 0.009754980670917332,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2660653287788215,
						"acc_norm,none": 0.2939795046968403,
						"acc_norm_stderr,none": 0.0033284369336675007,
						"acc_stderr,none": 0.0032284643559613965,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.0217288814387017,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7546070530463469,
						"acc_norm,none": 0.626075904569061,
						"acc_norm_stderr,none": 0.00847506824884468,
						"acc_stderr,none": 0.1484193150056166,
						"alias": "pythia",
						"bits_per_byte,none": 0.6378579730841933,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5560171625953843,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1059236203378835,
						"perplexity_stderr,none": 0.06002017421445837,
						"word_perplexity,none": 10.63622300495702,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.37943262411347517,
						"acc_norm,none": 0.42907801418439717,
						"acc_norm_stderr,none": 0.05156469126629902,
						"acc_stderr,none": 0.048465217230203445,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.045809453927047654,
						"acc_stderr,none": 0.04552192400253556,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3908450704225352,
						"acc_norm,none": 0.39436619718309857,
						"acc_norm_stderr,none": 0.029051039507650152,
						"acc_stderr,none": 0.029005007569909827,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877446,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7446450655453871,
						"acc_stderr,none": 0.002168704025488128,
						"alias": "qqp",
						"f1,none": 0.7180313541268367,
						"f1_stderr,none": 0.0026641857048737183
					},
					"race": {
						"acc,none": 0.3569377990430622,
						"acc_stderr,none": 0.014827656367408902,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2569,
						"em_stderr,none": 0.004369456283660683,
						"f1,none": 0.2662519050002098,
						"f1_stderr,none": 0.0043831504246620015
					},
					"rte": {
						"acc,none": 0.6931407942238267,
						"acc_stderr,none": 0.027760403038058972,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.94,
						"acc_norm,none": 0.938,
						"acc_norm_stderr,none": 0.007629823996280308,
						"acc_stderr,none": 0.007513751157474914,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.027850410392630694,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9162844036697247,
						"acc_stderr,none": 0.00938445934634095,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.587073877836649,
						"acc_norm,none": 0.7770168949315206,
						"acc_norm_stderr,none": 0.002942943554832927,
						"acc_stderr,none": 0.003481074190612269,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6628398389404678,
						"acc_stderr,none": 0.07742939856360101,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5570913461538461,
						"acc_stderr,none": 0.004971526440358204,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8601398601398601,
						"acc_stderr,none": 0.0034918932662458334,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5754901960784313,
						"acc_stderr,none": 0.004894226127469615,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34398610829929605,
						"acc_stderr,none": 0.0014426392096260243,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3378212974296206,
						"bleu_acc_stderr,none": 0.016557167322516875,
						"bleu_diff,none": -6.188108758444612,
						"bleu_diff_stderr,none": 0.8494358047626563,
						"bleu_max,none": 26.037006248282257,
						"bleu_max_stderr,none": 0.7867910588330527,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.016150201321323016,
						"rouge1_diff,none": -7.792478728141166,
						"rouge1_diff_stderr,none": 0.9207652003487916,
						"rouge1_max,none": 51.75737306585073,
						"rouge1_max_stderr,none": 0.850037983488124,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.015594753632006533,
						"rouge2_diff,none": -9.401609276652774,
						"rouge2_diff_stderr,none": 1.1190413430569957,
						"rouge2_max,none": 35.75423769869886,
						"rouge2_max_stderr,none": 0.999547150382563,
						"rougeL_acc,none": 0.30599755201958384,
						"rougeL_acc_stderr,none": 0.016132229728155048,
						"rougeL_diff,none": -8.141165872399707,
						"rougeL_diff_stderr,none": 0.9399019251251466,
						"rougeL_max,none": 48.74607552925405,
						"rougeL_max_stderr,none": 0.867035965150387
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3378212974296206,
						"bleu_acc_stderr,none": 0.016557167322516875,
						"bleu_diff,none": -6.188108758444612,
						"bleu_diff_stderr,none": 0.8494358047626563,
						"bleu_max,none": 26.037006248282257,
						"bleu_max_stderr,none": 0.7867910588330527,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.016150201321323016,
						"rouge1_diff,none": -7.792478728141166,
						"rouge1_diff_stderr,none": 0.9207652003487916,
						"rouge1_max,none": 51.75737306585073,
						"rouge1_max_stderr,none": 0.850037983488124,
						"rouge2_acc,none": 0.2729498164014688,
						"rouge2_acc_stderr,none": 0.015594753632006533,
						"rouge2_diff,none": -9.401609276652774,
						"rouge2_diff_stderr,none": 1.1190413430569957,
						"rouge2_max,none": 35.75423769869886,
						"rouge2_max_stderr,none": 0.999547150382563,
						"rougeL_acc,none": 0.30599755201958384,
						"rougeL_acc_stderr,none": 0.016132229728155048,
						"rougeL_diff,none": -8.141165872399707,
						"rougeL_diff_stderr,none": 0.9399019251251466,
						"rougeL_max,none": 48.74607552925405,
						"rougeL_max_stderr,none": 0.867035965150387
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2741738066095471,
						"acc_stderr,none": 0.01561651849721937,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.41379840998904505,
						"acc_stderr,none": 0.014258527444255804,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05019685039370079,
						"exact_match_stderr,none": 0.004845070213000883
					},
					"wic": {
						"acc,none": 0.5376175548589341,
						"acc_stderr,none": 0.019754574200198258,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6378579730841933,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5560171625953843,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.63622300495702,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.739542225730071,
						"acc_stderr,none": 0.012334833671998285,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8424908424908425,
						"acc_stderr,none": 0.0220877280615005,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07099005621458672,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502843,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.02047511809298897,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.022198954641476802,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745172,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.02031317923174518,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43890227576974566,
						"acc_stderr,none": 0.046761630567835805,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337343,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4742971887550201,
						"acc_stderr,none": 0.010008822253312039,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624489,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.00987150215909937,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5329317269076306,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4779116465863454,
						"acc_stderr,none": 0.010012288645591783,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483483,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840184,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409708,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.42449799196787147,
						"acc_stderr,none": 0.009907151253284268,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42329317269076305,
						"acc_stderr,none": 0.009903432138272912,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4526104417670683,
						"acc_stderr,none": 0.009976956772510006,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.00987847434182293,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.009841918156163162,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3497991967871486,
						"acc_stderr,none": 0.009559181474778286,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6328740749654052,
						"acc_stderr,none": 0.06137171904240014,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750355,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7769688947716744,
						"acc_stderr,none": 0.010712628906979186,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7227001985440106,
						"acc_stderr,none": 0.01152034254826845,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5823957643944407,
						"acc_stderr,none": 0.01269121138284864,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.01259874393825286,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6743878226340172,
						"acc_stderr,none": 0.012059150226422297,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5446724023825281,
						"acc_stderr,none": 0.01281566654206729,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840942,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084812,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5817339510258107,
						"acc_stderr,none": 0.012694045150564697,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6472534745201853,
						"acc_stderr,none": 0.012296459788853721,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8161384580804675,
						"acc_stderr,none": 0.0375173364281453,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8769892473118279,
						"acc_stderr,none": 0.006813191726515801,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7382690302398331,
						"acc_stderr,none": 0.0142020856634007,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.02582169136035425,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7738095238095238,
						"acc_stderr,none": 0.018653923879063384,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-C"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-D": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6352874859075536,
						"acc_norm,none": 0.6231679819616686,
						"acc_norm_stderr,none": 0.08041336019411056,
						"acc_stderr,none": 0.10197034794778023,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5115625,
						"acc_stderr,none": 0.05830044230785703,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.18485,
						"acc_stderr,none": 0.23759186027637047,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8257313432835821,
						"acc_stderr,none": 0.1623005120329401,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25482912332838037,
						"acc_norm,none": 0.25482912332838037,
						"acc_norm_stderr,none": 0.11275977984139783,
						"acc_stderr,none": 0.11275977984139783,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2800897945087205,
						"acc_norm,none": 0.2800897945087205,
						"acc_norm_stderr,none": 0.044117874208275,
						"acc_stderr,none": 0.044117874208275,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.706264907573047,
						"likelihood_diff_stderr,none": 0.5287235708307727,
						"pct_stereotype,none": 0.614788312462731,
						"pct_stereotype_stderr,none": 0.07263883882723758
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.005168906242870988
					},
					"glue": {
						"acc,none": 0.6979960695569318,
						"acc_stderr,none": 0.004128462193187022,
						"alias": "glue",
						"f1,none": 0.6882431584745393,
						"f1_stderr,none": 0.0001515380514571536,
						"mcc,none": 0.22221015187422147,
						"mcc_stderr,none": 0.03335949854955807
					},
					"kmmlu": {
						"acc,none": 0.2415535662720185,
						"acc_norm,none": 0.2415535662720185,
						"acc_norm_stderr,none": 0.022898433531358002,
						"acc_stderr,none": 0.022898433531358002,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5834246875685157,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.0004947174348697381,
						"acc_stderr,none": 0.06122725853637803,
						"alias": "kobest",
						"f1,none": 0.5723268546245084,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7234620609353775,
						"acc_stderr,none": 0.014326801563305042,
						"alias": "lambada",
						"perplexity,none": 3.410122797308321,
						"perplexity_stderr,none": 0.1583038834382835
					},
					"lambada_cloze": {
						"acc,none": 0.09673976324471181,
						"acc_stderr,none": 0.006624856854572791,
						"alias": "lambada_cloze",
						"perplexity,none": 142.95684752995467,
						"perplexity_stderr,none": 8.832368436321923
					},
					"lambada_multilingual": {
						"acc,none": 0.5518726955171744,
						"acc_stderr,none": 0.08140289273156724,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.093699431548426,
						"perplexity_stderr,none": 7.433965904013415
					},
					"mmlu": {
						"acc,none": 0.4253667568722404,
						"acc_stderr,none": 0.10012788088290377,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.39659936238044635,
						"acc_stderr,none": 0.10524569134438223,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4821371097521725,
						"acc_stderr,none": 0.08684144963378408,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.48326291842703933,
						"acc_stderr,none": 0.08873269241088375,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3558515699333968,
						"acc_stderr,none": 0.08185326884988259,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4085166784953868,
						"acc_norm,none": 0.38817012200866924,
						"acc_norm_stderr,none": 0.00011281389634930044,
						"acc_stderr,none": 0.05421592487171651,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4722142857142857,
						"acc_stderr,none": 0.05962409097241472,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.749765404460345,
						"acc_norm,none": 0.6276452242413153,
						"acc_norm_stderr,none": 0.00857755826766751,
						"acc_stderr,none": 0.1549849021135447,
						"alias": "pythia",
						"bits_per_byte,none": 0.6390429593288378,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572957532429468,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1209864171497097,
						"perplexity_stderr,none": 0.06089106283496052,
						"word_perplexity,none": 10.683042540349486,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3776595744680851,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.05346356893246232,
						"acc_stderr,none": 0.0437051977327324,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6857675285348241,
						"acc_stderr,none": 0.06655496415971968,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.345974867039867,
						"acc_stderr,none": 0.0014270248373733974,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241477,
						"bleu_diff,none": -6.498466031495915,
						"bleu_diff_stderr,none": 0.8222930807354809,
						"bleu_max,none": 25.70071645538953,
						"bleu_max_stderr,none": 0.7779999944757384,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.56120281571675,
						"rouge1_diff_stderr,none": 0.9058862759584512,
						"rouge1_max,none": 51.57744086113379,
						"rouge1_max_stderr,none": 0.8365370283081549,
						"rouge2_acc,none": 0.26560587515299877,
						"rouge2_acc_stderr,none": 0.015461027627253592,
						"rouge2_diff,none": -10.04069292215195,
						"rouge2_diff_stderr,none": 1.0916418789286157,
						"rouge2_max,none": 35.44942224509057,
						"rouge2_max_stderr,none": 0.9864282084632607,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -8.758975274140424,
						"rougeL_diff_stderr,none": 0.9222481859258791,
						"rougeL_max,none": 48.4753525594165,
						"rougeL_max_stderr,none": 0.8575658407507397
					},
					"xcopa": {
						"acc,none": 0.6209090909090909,
						"acc_stderr,none": 0.07209579971277716,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43780455153949127,
						"acc_stderr,none": 0.04498881411609002,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6324529210035497,
						"acc_stderr,none": 0.06146832812201086,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8183861541919533,
						"acc_stderr,none": 0.03816379879508567,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6352874859075536,
						"acc_norm,none": 0.6231679819616686,
						"acc_norm_stderr,none": 0.08041336019411056,
						"acc_stderr,none": 0.10197034794778023,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5115625,
						"acc_stderr,none": 0.05830044230785703,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171756,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.015749255189977596,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.45916666666666667,
						"acc_stderr,none": 0.014391541362656945,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4197952218430034,
						"acc_norm,none": 0.4539249146757679,
						"acc_norm_stderr,none": 0.014549221105171864,
						"acc_stderr,none": 0.014422181226303024,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7415824915824916,
						"acc_norm,none": 0.7066498316498316,
						"acc_norm_stderr,none": 0.009342508331708558,
						"acc_stderr,none": 0.008982741341291298,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.18485,
						"acc_stderr,none": 0.23759186027637047,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.163,
						"acc_stderr,none": 0.008261333113511682,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.3945,
						"acc_stderr,none": 0.010931359582007928,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.071,
						"acc_stderr,none": 0.005744214306500101,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.0050551733292434125,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0065,
						"acc_stderr,none": 0.0017973564602277782,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.164,
						"acc_stderr,none": 0.008281684197466848,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275358,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.064,
						"acc_stderr,none": 0.0054742107642788375,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0465,
						"acc_stderr,none": 0.004709561018023934,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.003036876355748373,
						"acc_stderr,none": 0.0011463358249986918,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8257313432835821,
						"acc_stderr,none": 0.1623005120329401,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704168,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987277,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103334,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286412,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786524,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661763,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666682,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333464,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036437,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380693,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656799,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033859,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571402,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666655,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178326,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411239,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801101,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381786,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910676,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.015803979428161943,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103305,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973445,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462621,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.01077776229836969,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792946,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996652,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333373,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967227,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246438,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.336,
						"acc_stderr,none": 0.014944140233795023,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652694,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.687,
						"acc_stderr,none": 0.014671272822977885,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.015796218551302622,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855748,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118588,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786553,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469352,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081342,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602498,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145148,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364485,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427425,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103294,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578141,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858925,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.015689173023144064,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183589,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319415,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.01506047203170662,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644598,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523724,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.292,
						"acc_stderr,none": 0.014385511563477343,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973423,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783207,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230182,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.00573383613969548,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499666,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.412,
						"acc_stderr,none": 0.015572363292015095,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.015615500115072957,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6761467889908257,
						"acc_stderr,none": 0.008184405497036657,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.6878306878306878,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25482912332838037,
						"acc_norm,none": 0.25482912332838037,
						"acc_norm_stderr,none": 0.11275977984139783,
						"acc_stderr,none": 0.11275977984139783,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.48484848484848486,
						"acc_norm,none": 0.48484848484848486,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915692,
						"acc_stderr,none": 0.08742975048915692,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2800897945087205,
						"acc_norm,none": 0.2800897945087205,
						"acc_norm_stderr,none": 0.044117874208275,
						"acc_stderr,none": 0.044117874208275,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.034339196275485345,
						"acc_stderr,none": 0.034339196275485345,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2966507177033493,
						"acc_norm,none": 0.2966507177033493,
						"acc_norm_stderr,none": 0.03167207801693405,
						"acc_stderr,none": 0.03167207801693405,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070894,
						"acc_stderr,none": 0.03541088558070894,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27205882352941174,
						"acc_norm,none": 0.27205882352941174,
						"acc_norm_stderr,none": 0.03830122520709327,
						"acc_stderr,none": 0.03830122520709327,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2724458204334365,
						"acc_norm,none": 0.2724458204334365,
						"acc_norm_stderr,none": 0.024811030866861566,
						"acc_stderr,none": 0.024811030866861566,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693247,
						"acc_stderr,none": 0.030778554678693247,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.30726256983240224,
						"acc_norm,none": 0.30726256983240224,
						"acc_norm_stderr,none": 0.03458033173302765,
						"acc_stderr,none": 0.03458033173302765,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460295,
						"acc_stderr,none": 0.028458820991460295,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.04555176317903525,
						"acc_stderr,none": 0.04555176317903525,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564394,
						"acc_stderr,none": 0.026350722655564394,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460388,
						"acc_stderr,none": 0.03256685484460388,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.27485380116959063,
						"acc_norm,none": 0.27485380116959063,
						"acc_norm_stderr,none": 0.034240429246915824,
						"acc_stderr,none": 0.034240429246915824,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.29559748427672955,
						"acc_norm,none": 0.29559748427672955,
						"acc_norm_stderr,none": 0.036302143777231344,
						"acc_stderr,none": 0.036302143777231344,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3312883435582822,
						"acc_norm,none": 0.3312883435582822,
						"acc_norm_stderr,none": 0.03697983910025588,
						"acc_stderr,none": 0.03697983910025588,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.034078261673374376,
						"acc_stderr,none": 0.034078261673374376,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.02874673063268137,
						"acc_stderr,none": 0.02874673063268137,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.0316314580755238,
						"acc_stderr,none": 0.0316314580755238,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.029754528538233245,
						"acc_stderr,none": 0.029754528538233245,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398516,
						"acc_stderr,none": 0.037667638895398516,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627455,
						"acc_stderr,none": 0.03366618544627455,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2684563758389262,
						"acc_norm,none": 0.2684563758389262,
						"acc_norm_stderr,none": 0.036427227538629016,
						"acc_stderr,none": 0.036427227538629016,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2796610169491525,
						"acc_norm,none": 0.2796610169491525,
						"acc_norm_stderr,none": 0.04149459161011112,
						"acc_stderr,none": 0.04149459161011112,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.040061680838488774,
						"acc_stderr,none": 0.040061680838488774,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593335,
						"acc_stderr,none": 0.03253020905593335,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.034516288762506196,
						"acc_stderr,none": 0.034516288762506196,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583285,
						"acc_stderr,none": 0.021605737836583285,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.03258939533605641,
						"acc_stderr,none": 0.03258939533605641,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.04049015460622489,
						"acc_stderr,none": 0.04049015460622489,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.27049180327868855,
						"acc_norm,none": 0.27049180327868855,
						"acc_norm_stderr,none": 0.04038308168357442,
						"acc_stderr,none": 0.04038308168357442,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.03184006730473941,
						"acc_stderr,none": 0.03184006730473941,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.033127832003565685,
						"acc_stderr,none": 0.033127832003565685,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003336,
						"acc_stderr,none": 0.03752833958003336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.03337037585221276,
						"acc_stderr,none": 0.03337037585221276,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.030439132051887946,
						"acc_stderr,none": 0.030439132051887946,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.03460711084041231,
						"acc_stderr,none": 0.03460711084041231,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3053097345132743,
						"acc_norm,none": 0.3053097345132743,
						"acc_norm_stderr,none": 0.03070256598213893,
						"acc_stderr,none": 0.03070256598213893,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624337,
						"acc_stderr,none": 0.03546563019624337,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548533,
						"acc_stderr,none": 0.03433919627548533,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2670807453416149,
						"acc_norm,none": 0.2670807453416149,
						"acc_norm_stderr,none": 0.03497754822823695,
						"acc_stderr,none": 0.03497754822823695,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.22221015187422147,
						"mcc_stderr,none": 0.03335949854955807
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.706264907573047,
						"likelihood_diff_stderr,none": 0.5287235708307727,
						"pct_stereotype,none": 0.614788312462731,
						"pct_stereotype_stderr,none": 0.07263883882723758
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.990906380441264,
						"likelihood_diff_stderr,none": 0.09352907245910518,
						"pct_stereotype,none": 0.6493738819320215,
						"pct_stereotype_stderr,none": 0.011655543596818134
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.135989010989011,
						"likelihood_diff_stderr,none": 0.40738587261343207,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105198
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.4772727272727275,
						"likelihood_diff_stderr,none": 1.645052688246622,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.142307692307693,
						"likelihood_diff_stderr,none": 0.6072223870764847,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.051386112368797664
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.7953125,
						"likelihood_diff_stderr,none": 0.16672286055923027,
						"pct_stereotype,none": 0.615625,
						"pct_stereotype_stderr,none": 0.0272358133313715
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.946759259259259,
						"likelihood_diff_stderr,none": 0.263884733333858,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.033448873829978666
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.529513888888889,
						"likelihood_diff_stderr,none": 0.3838491395966834,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.8585137795275593,
						"likelihood_diff_stderr,none": 0.16821499143604945,
						"pct_stereotype,none": 0.5590551181102362,
						"pct_stereotype_stderr,none": 0.02205034999632727
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.283783783783784,
						"likelihood_diff_stderr,none": 0.38987015213932563,
						"pct_stereotype,none": 0.7747747747747747,
						"pct_stereotype_stderr,none": 0.03982904640716733
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.395161290322581,
						"likelihood_diff_stderr,none": 0.5070192000430794,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.03495073154102977
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.405263157894737,
						"likelihood_diff_stderr,none": 0.24437317086781812,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.420468097793679,
						"likelihood_diff_stderr,none": 0.0788339729343734,
						"pct_stereotype,none": 0.5796064400715564,
						"pct_stereotype_stderr,none": 0.012057509734183715
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.345833333333333,
						"likelihood_diff_stderr,none": 0.3026235805800601,
						"pct_stereotype,none": 0.6444444444444445,
						"pct_stereotype_stderr,none": 0.05074011803597718
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.2403846153846154,
						"likelihood_diff_stderr,none": 0.8902586165487254,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.170454545454546,
						"likelihood_diff_stderr,none": 0.5051644442183943,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.05615974350262316
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.90303738317757,
						"likelihood_diff_stderr,none": 0.14047827369791682,
						"pct_stereotype,none": 0.6105919003115264,
						"pct_stereotype_stderr,none": 0.027258566978193188
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.727272727272727,
						"likelihood_diff_stderr,none": 0.20866106607301482,
						"pct_stereotype,none": 0.38735177865612647,
						"pct_stereotype_stderr,none": 0.030687258758503668
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.345486111111111,
						"likelihood_diff_stderr,none": 0.39211321013082234,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.057003814617008604
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.2067934782608694,
						"likelihood_diff_stderr,none": 0.15891547899957584,
						"pct_stereotype,none": 0.49782608695652175,
						"pct_stereotype_stderr,none": 0.023337780813399874
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.267391304347826,
						"likelihood_diff_stderr,none": 0.2767311115781506,
						"pct_stereotype,none": 0.7043478260869566,
						"pct_stereotype_stderr,none": 0.04273972288221525
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.2005494505494507,
						"likelihood_diff_stderr,none": 0.32168805989469407,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898534
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.061224489795919,
						"likelihood_diff_stderr,none": 0.24691233273275842,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.032350772404131305
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.005168906242870988
					},
					"glue": {
						"acc,none": 0.6979960695569318,
						"acc_stderr,none": 0.004128462193187022,
						"alias": "glue",
						"f1,none": 0.6882431584745393,
						"f1_stderr,none": 0.0001515380514571536,
						"mcc,none": 0.22221015187422147,
						"mcc_stderr,none": 0.03335949854955807
					},
					"hellaswag": {
						"acc,none": 0.5632344154550887,
						"acc_norm,none": 0.7507468631746664,
						"acc_norm_stderr,none": 0.004316965678675091,
						"acc_stderr,none": 0.004949716368890496,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2415535662720185,
						"acc_norm,none": 0.2415535662720185,
						"acc_norm_stderr,none": 0.022898433531358002,
						"acc_stderr,none": 0.022898433531358002,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.040201512610368445,
						"acc_stderr,none": 0.040201512610368445,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515441,
						"acc_stderr,none": 0.013531522534515441,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877634,
						"acc_stderr,none": 0.013663187134877634,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920845,
						"acc_stderr,none": 0.013512312258920845,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740668,
						"acc_stderr,none": 0.014142984975740668,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.017613084291727026,
						"acc_stderr,none": 0.017613084291727026,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364478,
						"acc_stderr,none": 0.013190830072364478,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220478,
						"acc_stderr,none": 0.014484778521220478,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.19230769230769232,
						"acc_norm,none": 0.19230769230769232,
						"acc_norm_stderr,none": 0.03469975803447378,
						"acc_stderr,none": 0.03469975803447378,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.01306317904059529,
						"acc_stderr,none": 0.01306317904059529,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702315,
						"acc_stderr,none": 0.013681600278702315,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.01368160027870231,
						"acc_stderr,none": 0.01368160027870231,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364464,
						"acc_stderr,none": 0.013190830072364464,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877653,
						"acc_stderr,none": 0.013663187134877653,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.223,
						"acc_norm,none": 0.223,
						"acc_norm_stderr,none": 0.013169830843425672,
						"acc_stderr,none": 0.013169830843425672,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651148,
						"acc_stderr,none": 0.013736254390651148,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967216,
						"acc_stderr,none": 0.013473586661967216,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281558,
						"acc_stderr,none": 0.013354937452281558,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490528,
						"acc_stderr,none": 0.014127086556490528,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438704,
						"acc_stderr,none": 0.013434451402438704,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.01323250161908534,
						"acc_stderr,none": 0.01323250161908534,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613614,
						"acc_stderr,none": 0.013294199326613614,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613604,
						"acc_stderr,none": 0.013294199326613604,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.01803238600153009,
						"acc_stderr,none": 0.01803238600153009,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763914,
						"acc_stderr,none": 0.013253174964763914,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555946,
						"acc_stderr,none": 0.013550631705555946,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763912,
						"acc_stderr,none": 0.013253174964763912,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438685,
						"acc_stderr,none": 0.013434451402438685,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.024929480622100736,
						"acc_stderr,none": 0.024929480622100736,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936436,
						"acc_stderr,none": 0.013334797216936436,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.03027512038907304,
						"acc_stderr,none": 0.03027512038907304,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031811,
						"acc_stderr,none": 0.012997843819031811,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440467,
						"acc_stderr,none": 0.013946271849440467,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.029601626330440604,
						"acc_stderr,none": 0.029601626330440604,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145169,
						"acc_stderr,none": 0.013979965645145169,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5834246875685157,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.0004947174348697381,
						"acc_stderr,none": 0.06122725853637803,
						"alias": "kobest",
						"f1,none": 0.5723268546245084,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6566951566951567,
						"acc_stderr,none": 0.01267631539953706,
						"alias": " - kobest_boolq",
						"f1,none": 0.6416151910318004,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264896,
						"alias": " - kobest_copa",
						"f1,none": 0.6340481592622411,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.428,
						"acc_norm,none": 0.556,
						"acc_norm_stderr,none": 0.022242244375731017,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42393203952475095,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.6574307304785895,
						"acc_stderr,none": 0.023847980511930583,
						"alias": " - kobest_sentineg",
						"f1,none": 0.6375402792696026,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4992063492063492,
						"acc_stderr,none": 0.014091479467428242,
						"alias": " - kobest_wic",
						"f1,none": 0.48447411574530663,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7234620609353775,
						"acc_stderr,none": 0.014326801563305042,
						"alias": "lambada",
						"perplexity,none": 3.410122797308321,
						"perplexity_stderr,none": 0.1583038834382835
					},
					"lambada_cloze": {
						"acc,none": 0.09673976324471181,
						"acc_stderr,none": 0.006624856854572791,
						"alias": "lambada_cloze",
						"perplexity,none": 142.95684752995467,
						"perplexity_stderr,none": 8.832368436321923
					},
					"lambada_multilingual": {
						"acc,none": 0.5518726955171744,
						"acc_stderr,none": 0.08140289273156724,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.093699431548426,
						"perplexity_stderr,none": 7.433965904013415
					},
					"lambada_openai": {
						"acc,none": 0.7510188239860276,
						"acc_stderr,none": 0.006024496287103944,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1209864171497097,
						"perplexity_stderr,none": 0.06089106283496052
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.08635746167281196,
						"acc_stderr,none": 0.00391336325598078,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 127.75508041720562,
						"perplexity_stderr,none": 4.1193441705681835
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4418785173685232,
						"acc_stderr,none": 0.006918753955722862,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31.746014024164424,
						"perplexity_stderr,none": 1.75734954046411
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7496603920046575,
						"acc_stderr,none": 0.00603544281761281,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.123121730158802,
						"perplexity_stderr,none": 0.06088404355169919
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.47816805744226665,
						"acc_stderr,none": 0.0069593340494832475,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.003910874716293,
						"perplexity_stderr,none": 1.220526708160942
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5625849019988356,
						"acc_stderr,none": 0.0069111925667317935,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 15.13217140112514,
						"perplexity_stderr,none": 0.7297857500094299
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5270716087715893,
						"acc_stderr,none": 0.006955759823355592,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.463279127577458,
						"perplexity_stderr,none": 1.080562451163269
					},
					"lambada_standard": {
						"acc,none": 0.697651853289346,
						"acc_stderr,none": 0.006398602102697932,
						"alias": " - lambada_standard",
						"perplexity,none": 3.6971268007033427,
						"perplexity_stderr,none": 0.07228934104494762
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.10712206481661168,
						"acc_stderr,none": 0.004308713186753709,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 158.15861464270372,
						"perplexity_stderr,none": 4.84750663534671
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2964669738863287,
						"acc_norm_stderr,none": 0.017913222760382753,
						"acc_stderr,none": 0.016815676206479533,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2582697201017812,
						"acc_norm,none": 0.28880407124681934,
						"acc_norm_stderr,none": 0.011434263441269486,
						"acc_stderr,none": 0.011042608058378036,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24958123953098826,
						"acc_norm,none": 0.2536013400335008,
						"acc_norm_stderr,none": 0.007964559996672166,
						"acc_stderr,none": 0.007922429819042544,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3442067358610464,
						"acc_stderr,none": 0.004889721898055986,
						"alias": "mc_taco",
						"f1,none": 0.5069278547539418,
						"f1_stderr,none": 0.0054560610559344054
					},
					"medmcqa": {
						"acc,none": 0.3858474778866842,
						"acc_norm,none": 0.3858474778866842,
						"acc_norm_stderr,none": 0.007527555019766015,
						"acc_stderr,none": 0.007527555019766015,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3982717989002357,
						"acc_norm,none": 0.3982717989002357,
						"acc_norm_stderr,none": 0.013726076188490187,
						"acc_stderr,none": 0.013726076188490187,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4253667568722404,
						"acc_stderr,none": 0.10012788088290377,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.43703703703703706,
						"acc_stderr,none": 0.04284958639753399,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.40789473684210525,
						"acc_stderr,none": 0.03999309712777471,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.44150943396226416,
						"acc_stderr,none": 0.030561590426731844,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4861111111111111,
						"acc_stderr,none": 0.04179596617581002,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.043898699568087785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.37446808510638296,
						"acc_stderr,none": 0.03163910665367291,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436695,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.45517241379310347,
						"acc_stderr,none": 0.04149886942192117,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30687830687830686,
						"acc_stderr,none": 0.02375292871211213,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.47419354838709676,
						"acc_stderr,none": 0.028406095057653326,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.35960591133004927,
						"acc_stderr,none": 0.033764582465095665,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5575757575757576,
						"acc_stderr,none": 0.038783721137112745,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.03547601494006938,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6113989637305699,
						"acc_stderr,none": 0.03517739796373134,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3923076923076923,
						"acc_stderr,none": 0.024756000382130956,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.27037037037037037,
						"acc_stderr,none": 0.027080372815145668,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36134453781512604,
						"acc_stderr,none": 0.031204691225150013,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5577981651376147,
						"acc_stderr,none": 0.02129361320752021,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.029157522184605607,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5686274509803921,
						"acc_stderr,none": 0.034760990605016355,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.030685820596610815,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4663677130044843,
						"acc_stderr,none": 0.033481800170603065,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5572519083969466,
						"acc_stderr,none": 0.04356447202665069,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.39659936238044635,
						"acc_stderr,none": 0.10524569134438223,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.47107438016528924,
						"acc_stderr,none": 0.04556710331269498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.04820403072760627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4171779141104294,
						"acc_stderr,none": 0.03874102859818081,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5533980582524272,
						"acc_stderr,none": 0.04922424153458933,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6752136752136753,
						"acc_stderr,none": 0.03067902276549883,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6066411238825032,
						"acc_stderr,none": 0.017468556724503162,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4190751445086705,
						"acc_stderr,none": 0.02656417811142261,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23575418994413408,
						"acc_stderr,none": 0.014196375686290804,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4542483660130719,
						"acc_stderr,none": 0.028509807802626564,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4821371097521725,
						"acc_stderr,none": 0.08684144963378408,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4887459807073955,
						"acc_stderr,none": 0.028390897396863533,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4845679012345679,
						"acc_stderr,none": 0.0278074900442762,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3120567375886525,
						"acc_stderr,none": 0.02764012054516993,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.33116036505867014,
						"acc_stderr,none": 0.012020128195985752,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.40808823529411764,
						"acc_stderr,none": 0.029855261393483924,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.434640522875817,
						"acc_stderr,none": 0.02005426920072646,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4818181818181818,
						"acc_stderr,none": 0.04785964010794916,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.031680911612338825,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.48326291842703933,
						"acc_stderr,none": 0.08873269241088375,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7213930348258707,
						"acc_stderr,none": 0.031700561834973086,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3558515699333968,
						"acc_stderr,none": 0.08185326884988259,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3855421686746988,
						"acc_stderr,none": 0.037891344246115496,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6257309941520468,
						"acc_stderr,none": 0.03711601185389481,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7802343352012227,
						"acc_stderr,none": 0.004179933984206167,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.781326281529699,
						"acc_stderr,none": 0.004168844187236536,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.696078431372549,
						"acc_stderr,none": 0.022798834443163555,
						"alias": "mrpc",
						"f1,none": 0.8176470588235294,
						"f1_stderr,none": 0.016072129459324066
					},
					"multimedqa": {
						"acc,none": 0.4085166784953868,
						"acc_norm,none": 0.38817012200866924,
						"acc_norm_stderr,none": 0.00011281389634930044,
						"acc_stderr,none": 0.05421592487171651,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5138201320132013,
						"acc_stderr,none": 0.007179059189771664,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7136004529472937,
						"mrr_stderr,none": 0.010353662494870028,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.3972911963882618,
						"r@2_stderr,none": 0.016448890253661457
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6548156525359465,
						"mrr_stderr,none": 0.010405762138330232,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4706546275395034,
						"r@2_stderr,none": 0.016778343895001425
					},
					"openbookqa": {
						"acc,none": 0.312,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.022080014812228137,
						"acc_stderr,none": 0.02074059653648808,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.01094796460372824,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.010774044738166446,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4115,
						"acc_stderr,none": 0.011006563824537298,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5435,
						"acc_stderr,none": 0.011140733053371406,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.01116681910502999,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5305,
						"acc_stderr,none": 0.011162310405413186,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.011164310140373716,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4722142857142857,
						"acc_stderr,none": 0.05962409097241472,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7780195865070729,
						"acc_norm,none": 0.7840043525571273,
						"acc_norm_stderr,none": 0.009601236303553548,
						"acc_stderr,none": 0.009696120744662005,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.261368488471392,
						"acc_norm,none": 0.2834116140051238,
						"acc_norm_stderr,none": 0.003292432314345715,
						"acc_stderr,none": 0.0032100639192897936,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.021912377885779964,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.749765404460345,
						"acc_norm,none": 0.6276452242413153,
						"acc_norm_stderr,none": 0.00857755826766751,
						"acc_stderr,none": 0.1549849021135447,
						"alias": "pythia",
						"bits_per_byte,none": 0.6390429593288378,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572957532429468,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1209864171497097,
						"perplexity_stderr,none": 0.06089106283496052,
						"word_perplexity,none": 10.683042540349486,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3776595744680851,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.05346356893246232,
						"acc_stderr,none": 0.0437051977327324,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.425,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.045316348358748273,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.325,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03714454174077365,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3873239436619718,
						"acc_norm,none": 0.39436619718309857,
						"acc_norm_stderr,none": 0.029051039507650152,
						"acc_stderr,none": 0.02895738957595096,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877446,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6810042047984171,
						"acc_stderr,none": 0.0023180397355351888,
						"alias": "qqp",
						"f1,none": 0.6871558520315343,
						"f1_stderr,none": 0.002616288064452211
					},
					"race": {
						"acc,none": 0.3588516746411483,
						"acc_stderr,none": 0.014845215125262316,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2523,
						"em_stderr,none": 0.004343542061010367,
						"f1,none": 0.26162857165932657,
						"f1_stderr,none": 0.004357783329294587
					},
					"rte": {
						"acc,none": 0.7075812274368231,
						"acc_stderr,none": 0.027380175972575613,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.943,
						"acc_norm,none": 0.941,
						"acc_norm_stderr,none": 0.0074548356504067275,
						"acc_stderr,none": 0.007335175853706815,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.703971119133574,
						"acc_stderr,none": 0.027478303862979354,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9094036697247706,
						"acc_stderr,none": 0.009725783032052368,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5878736379086275,
						"acc_norm,none": 0.77666699990003,
						"acc_norm_stderr,none": 0.002944588410642628,
						"acc_stderr,none": 0.0034800694629802255,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6857675285348241,
						"acc_stderr,none": 0.06655496415971968,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6233974358974359,
						"acc_stderr,none": 0.004849462513385676,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8625722103982973,
						"acc_stderr,none": 0.0034662865042031426,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5757843137254902,
						"acc_stderr,none": 0.004893780435310509,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.345974867039867,
						"acc_stderr,none": 0.0014270248373733974,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241477,
						"bleu_diff,none": -6.498466031495915,
						"bleu_diff_stderr,none": 0.8222930807354809,
						"bleu_max,none": 25.70071645538953,
						"bleu_max_stderr,none": 0.7779999944757384,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.56120281571675,
						"rouge1_diff_stderr,none": 0.9058862759584512,
						"rouge1_max,none": 51.57744086113379,
						"rouge1_max_stderr,none": 0.8365370283081549,
						"rouge2_acc,none": 0.26560587515299877,
						"rouge2_acc_stderr,none": 0.015461027627253592,
						"rouge2_diff,none": -10.04069292215195,
						"rouge2_diff_stderr,none": 1.0916418789286157,
						"rouge2_max,none": 35.44942224509057,
						"rouge2_max_stderr,none": 0.9864282084632607,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -8.758975274140424,
						"rougeL_diff_stderr,none": 0.9222481859258791,
						"rougeL_max,none": 48.4753525594165,
						"rougeL_max_stderr,none": 0.8575658407507397
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3317013463892289,
						"bleu_acc_stderr,none": 0.016482148810241477,
						"bleu_diff,none": -6.498466031495915,
						"bleu_diff_stderr,none": 0.8222930807354809,
						"bleu_max,none": 25.70071645538953,
						"bleu_max_stderr,none": 0.7779999944757384,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -8.56120281571675,
						"rouge1_diff_stderr,none": 0.9058862759584512,
						"rouge1_max,none": 51.57744086113379,
						"rouge1_max_stderr,none": 0.8365370283081549,
						"rouge2_acc,none": 0.26560587515299877,
						"rouge2_acc_stderr,none": 0.015461027627253592,
						"rouge2_diff,none": -10.04069292215195,
						"rouge2_diff_stderr,none": 1.0916418789286157,
						"rouge2_max,none": 35.44942224509057,
						"rouge2_max_stderr,none": 0.9864282084632607,
						"rougeL_acc,none": 0.2974296205630355,
						"rougeL_acc_stderr,none": 0.016002651487361002,
						"rougeL_diff,none": -8.758975274140424,
						"rougeL_diff_stderr,none": 0.9222481859258791,
						"rougeL_max,none": 48.4753525594165,
						"rougeL_max_stderr,none": 0.8575658407507397
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2766217870257038,
						"acc_stderr,none": 0.015659605755326926,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.41532794705403026,
						"acc_stderr,none": 0.01423738605665655,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05757874015748032,
						"exact_match_stderr,none": 0.005168906242870988
					},
					"wic": {
						"acc,none": 0.5078369905956113,
						"acc_stderr,none": 0.01980828765781382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6390429593288378,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5572957532429468,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.683042540349486,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7411207576953434,
						"acc_stderr,none": 0.012310515810993378,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.04744733393277919,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8498168498168498,
						"acc_stderr,none": 0.021661514699106654,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6209090909090909,
						"acc_stderr,none": 0.07209579971277716,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988968,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747605,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.0213237286328075,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896146,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.02031317923174518,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43780455153949127,
						"acc_stderr,none": 0.04498881411609002,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337342,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4795180722891566,
						"acc_stderr,none": 0.010013660629930818,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4847389558232932,
						"acc_stderr,none": 0.010017403508578977,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41606425702811245,
						"acc_stderr,none": 0.009879848511479758,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.009999776793187642,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4678714859437751,
						"acc_stderr,none": 0.010001361068173077,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4823293172690763,
						"acc_stderr,none": 0.010015812066461167,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994481,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.46907630522088356,
						"acc_stderr,none": 0.010002886789051675,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.42329317269076305,
						"acc_stderr,none": 0.009903432138272918,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4321285140562249,
						"acc_stderr,none": 0.009929309430958677,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44457831325301206,
						"acc_stderr,none": 0.00996031572634482,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099365,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.00984346200738422,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3546184738955823,
						"acc_stderr,none": 0.009589070127861869,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6324529210035497,
						"acc_stderr,none": 0.06146832812201086,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907709,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7756452680344142,
						"acc_stderr,none": 0.010735214264503254,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7213765718067505,
						"acc_stderr,none": 0.011537224908075907,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5751158173395102,
						"acc_stderr,none": 0.012721094073523329,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.012605764077627153,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6737260092653872,
						"acc_stderr,none": 0.012065474625979069,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5459960291197882,
						"acc_stderr,none": 0.012812565368728929,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6896095301125083,
						"acc_stderr,none": 0.011906040152499258,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730203,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5777630708140304,
						"acc_stderr,none": 0.012710555263676445,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6472534745201853,
						"acc_stderr,none": 0.01229645978885372,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8183861541919533,
						"acc_stderr,none": 0.03816379879508567,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8808602150537634,
						"acc_stderr,none": 0.006719915957605396,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7434827945776851,
						"acc_stderr,none": 0.014109478326566513,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7047619047619048,
						"acc_stderr,none": 0.02574201764583702,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7658730158730159,
						"acc_stderr,none": 0.0188807884850783,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-D"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-E": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.6161217587373168,
						"acc_norm_stderr,none": 0.07872751684307784,
						"acc_stderr,none": 0.1029029183695005,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5134375,
						"acc_stderr,none": 0.053447921143282486,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1614,
						"acc_stderr,none": 0.22839176262386612,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8318059701492537,
						"acc_stderr,none": 0.15260457753195683,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25111441307578003,
						"acc_norm,none": 0.25111441307578003,
						"acc_norm_stderr,none": 0.11122572965740272,
						"acc_stderr,none": 0.11122572965740272,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2776722500431705,
						"acc_norm,none": 0.2776722500431705,
						"acc_norm_stderr,none": 0.0435403947842275,
						"acc_stderr,none": 0.0435403947842275,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.75977377757901,
						"likelihood_diff_stderr,none": 0.5415490435210182,
						"pct_stereotype,none": 0.6161299940369708,
						"pct_stereotype_stderr,none": 0.06954766232246751
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06594488188976377,
						"exact_match_stderr,none": 0.005507085737903648
					},
					"glue": {
						"acc,none": 0.7301149057803601,
						"acc_stderr,none": 0.04849063916526982,
						"alias": "glue",
						"f1,none": 0.7164277774601332,
						"f1_stderr,none": 0.00010767812473300899,
						"mcc,none": 0.16816948860566025,
						"mcc_stderr,none": 0.0009434042474712849
					},
					"kmmlu": {
						"acc,none": 0.26260467802483406,
						"acc_norm,none": 0.26260467802483406,
						"acc_norm_stderr,none": 0.02512452642389409,
						"acc_stderr,none": 0.02512452642389409,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5860556895417671,
						"acc_norm,none": 0.538,
						"acc_norm_stderr,none": 0.0004981082164328662,
						"acc_stderr,none": 0.06319725476338278,
						"alias": "kobest",
						"f1,none": 0.5743844586651707,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7202600426935766,
						"acc_stderr,none": 0.014994411202277305,
						"alias": "lambada",
						"perplexity,none": 3.4552162142396714,
						"perplexity_stderr,none": 0.15310669389137432
					},
					"lambada_cloze": {
						"acc,none": 0.09965068891907626,
						"acc_stderr,none": 0.006811299711462279,
						"alias": "lambada_cloze",
						"perplexity,none": 148.41685558245132,
						"perplexity_stderr,none": 5.774887060775421
					},
					"lambada_multilingual": {
						"acc,none": 0.5522220065980982,
						"acc_stderr,none": 0.07700281923150025,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.08669925072525,
						"perplexity_stderr,none": 7.254542912950282
					},
					"mmlu": {
						"acc,none": 0.4336989032901296,
						"acc_stderr,none": 0.0990729344620139,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.40403825717322,
						"acc_stderr,none": 0.11040602815759773,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.48793047956227875,
						"acc_stderr,none": 0.08230300413372368,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49041273968150795,
						"acc_stderr,none": 0.08156911679174601,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3691722169362512,
						"acc_stderr,none": 0.08145667568838086,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.41022001419446413,
						"acc_norm,none": 0.3851143542470191,
						"acc_norm_stderr,none": 0.0001182188479162057,
						"acc_stderr,none": 0.0585200812055347,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4734999999999999,
						"acc_stderr,none": 0.057672983113210445,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7547764662970767,
						"acc_norm,none": 0.6207284168810908,
						"acc_norm_stderr,none": 0.008268959432667734,
						"acc_stderr,none": 0.14733170535246046,
						"alias": "pythia",
						"bits_per_byte,none": 0.6396265091587476,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5579257848582162,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1799783556168073,
						"perplexity_stderr,none": 0.0619783536022585,
						"word_perplexity,none": 10.70617463269285,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.36879432624113473,
						"acc_norm,none": 0.4219858156028369,
						"acc_norm_stderr,none": 0.05567886322575791,
						"acc_stderr,none": 0.04236144642383186,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6732221889454594,
						"acc_stderr,none": 0.08418482277632774,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34561677724581125,
						"acc_stderr,none": 0.0015002876271933555,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280903,
						"bleu_diff,none": -6.966932621926429,
						"bleu_diff_stderr,none": 0.824564524663069,
						"bleu_max,none": 25.595486127300646,
						"bleu_max_stderr,none": 0.7862675161693766,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -9.129749584829064,
						"rouge1_diff_stderr,none": 0.8843025304565784,
						"rouge1_max,none": 51.24160955366627,
						"rouge1_max_stderr,none": 0.8471568075415284,
						"rouge2_acc,none": 0.2582619339045288,
						"rouge2_acc_stderr,none": 0.015321821688476189,
						"rouge2_diff,none": -10.690393479392183,
						"rouge2_diff_stderr,none": 1.0773969464432287,
						"rouge2_max,none": 35.20192789830806,
						"rouge2_max_stderr,none": 0.9877722884597451,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -9.285548313850288,
						"rougeL_diff_stderr,none": 0.9013542036926124,
						"rougeL_max,none": 48.276105917652785,
						"rougeL_max_stderr,none": 0.8602935524860347
					},
					"xcopa": {
						"acc,none": 0.6230909090909091,
						"acc_stderr,none": 0.07193581940134663,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.438714859437751,
						"acc_stderr,none": 0.04644427584995752,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6332952289272606,
						"acc_stderr,none": 0.05426600506573858,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8125421443020904,
						"acc_stderr,none": 0.03966825911669438,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.6161217587373168,
						"acc_norm_stderr,none": 0.07872751684307784,
						"acc_stderr,none": 0.1029029183695005,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5134375,
						"acc_stderr,none": 0.053447921143282486,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.015325105508898134,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.014404353664908238,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41723549488054607,
						"acc_norm,none": 0.45051194539249145,
						"acc_norm_stderr,none": 0.014539646098471627,
						"acc_stderr,none": 0.014409825518403077,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.742003367003367,
						"acc_norm,none": 0.6978114478114478,
						"acc_norm_stderr,none": 0.009422719042483181,
						"acc_stderr,none": 0.008977970005203404,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1614,
						"acc_stderr,none": 0.22839176262386612,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0925,
						"acc_stderr,none": 0.006480190694394501,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.2855,
						"acc_stderr,none": 0.01010177696986899,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.065,
						"acc_stderr,none": 0.005513864466114151,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9225,
						"acc_stderr,none": 0.005980364318224231,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0045,
						"acc_stderr,none": 0.0014969954902233232,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.156,
						"acc_stderr,none": 0.008115721315214952,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154647144,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0545,
						"acc_stderr,none": 0.005077180702116196,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.034,
						"acc_stderr,none": 0.004053420174069569,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.003036876355748373,
						"acc_stderr,none": 0.001146335824998688,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8318059701492537,
						"acc_stderr,none": 0.15260457753195683,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676768,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578028,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745911,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817133,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117723,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843976,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333366,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666668,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306523,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426112,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639233,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666683,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306496,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832024,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.01005510343582333,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.014721675438880227,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091512,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.674,
						"acc_stderr,none": 0.014830507204541037,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151096,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731977,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897068,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.01411109928825958,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504389,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.00682976175614093,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248106,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614755,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706824,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.378,
						"acc_stderr,none": 0.015341165254026644,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.659,
						"acc_stderr,none": 0.01499813134840272,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296186,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.015589035185604633,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138757,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333447,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938057,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000005,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151108,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389644,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653892,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.729,
						"acc_stderr,none": 0.014062601350986184,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661745,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.0062736240211187615,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866446,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578078,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.567,
						"acc_stderr,none": 0.015676630912181327,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.0118004343246446,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033843,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316407,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271298,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400227,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.248,
						"acc_stderr,none": 0.013663187134877658,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597498,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662763,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042963,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734958,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.015549205052920678,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.015640320317040112,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6688073394495413,
						"acc_stderr,none": 0.008231583858517829,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.7007651189602767,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25111441307578003,
						"acc_norm,none": 0.25111441307578003,
						"acc_norm_stderr,none": 0.11122572965740272,
						"acc_stderr,none": 0.11122572965740272,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.48484848484848486,
						"acc_norm,none": 0.48484848484848486,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.07228658768525041,
						"acc_stderr,none": 0.07228658768525041,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.06859222936927092,
						"acc_stderr,none": 0.06859222936927092,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2776722500431705,
						"acc_norm,none": 0.2776722500431705,
						"acc_norm_stderr,none": 0.0435403947842275,
						"acc_stderr,none": 0.0435403947842275,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3014354066985646,
						"acc_norm,none": 0.3014354066985646,
						"acc_norm_stderr,none": 0.03181769753423362,
						"acc_stderr,none": 0.03181769753423362,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2693498452012384,
						"acc_norm,none": 0.2693498452012384,
						"acc_norm_stderr,none": 0.024722089230802036,
						"acc_stderr,none": 0.024722089230802036,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923382,
						"acc_stderr,none": 0.030964517926923382,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.033831950813285244,
						"acc_stderr,none": 0.033831950813285244,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.04555176317903525,
						"acc_stderr,none": 0.04555176317903525,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.02622311550050611,
						"acc_stderr,none": 0.02622311550050611,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.29901960784313725,
						"acc_norm,none": 0.29901960784313725,
						"acc_norm_stderr,none": 0.03213325717373618,
						"acc_stderr,none": 0.03213325717373618,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.0340105262010409,
						"acc_stderr,none": 0.0340105262010409,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2893081761006289,
						"acc_norm,none": 0.2893081761006289,
						"acc_norm_stderr,none": 0.03607384789794788,
						"acc_stderr,none": 0.03607384789794788,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.036429145782924055,
						"acc_stderr,none": 0.036429145782924055,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.033848364281578606,
						"acc_stderr,none": 0.033848364281578606,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.03163145807552379,
						"acc_stderr,none": 0.03163145807552379,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335356,
						"acc_stderr,none": 0.03388193526335356,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2751677852348993,
						"acc_norm,none": 0.2751677852348993,
						"acc_norm_stderr,none": 0.03671019403342563,
						"acc_stderr,none": 0.03671019403342563,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.040061680838488774,
						"acc_stderr,none": 0.040061680838488774,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148836,
						"acc_stderr,none": 0.034300856070148836,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26034063260340634,
						"acc_norm,none": 0.26034063260340634,
						"acc_norm_stderr,none": 0.021671797319809193,
						"acc_stderr,none": 0.021671797319809193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3411214953271028,
						"acc_norm,none": 0.3411214953271028,
						"acc_norm_stderr,none": 0.03248384363697549,
						"acc_stderr,none": 0.03248384363697549,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.040849837332392225,
						"acc_stderr,none": 0.040849837332392225,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.040759446590692514,
						"acc_stderr,none": 0.040759446590692514,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.03155253554505399,
						"acc_stderr,none": 0.03155253554505399,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.032947543143888765,
						"acc_stderr,none": 0.032947543143888765,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003336,
						"acc_stderr,none": 0.03752833958003336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2974137931034483,
						"acc_norm,none": 0.2974137931034483,
						"acc_norm_stderr,none": 0.030076297550592983,
						"acc_stderr,none": 0.030076297550592983,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.28735632183908044,
						"acc_norm,none": 0.28735632183908044,
						"acc_norm_stderr,none": 0.03440515707228721,
						"acc_stderr,none": 0.03440515707228721,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.29646017699115046,
						"acc_norm,none": 0.29646017699115046,
						"acc_norm_stderr,none": 0.030446422190794638,
						"acc_stderr,none": 0.030446422190794638,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548533,
						"acc_stderr,none": 0.03433919627548533,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2670807453416149,
						"acc_norm,none": 0.2670807453416149,
						"acc_norm_stderr,none": 0.03497754822823695,
						"acc_stderr,none": 0.03497754822823695,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.036554511504337694,
						"acc_stderr,none": 0.036554511504337694,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.16816948860566025,
						"mcc_stderr,none": 0.030714886414754734
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774709,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.75977377757901,
						"likelihood_diff_stderr,none": 0.5415490435210182,
						"pct_stereotype,none": 0.6161299940369708,
						"pct_stereotype_stderr,none": 0.06954766232246751
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.01520572450805,
						"likelihood_diff_stderr,none": 0.09467879568013395,
						"pct_stereotype,none": 0.6457960644007156,
						"pct_stereotype_stderr,none": 0.011682542807413805
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.208791208791209,
						"likelihood_diff_stderr,none": 0.4012916747014988,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.340909090909091,
						"likelihood_diff_stderr,none": 1.6370263809422336,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.1692307692307695,
						"likelihood_diff_stderr,none": 0.6179921469072669,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.05266563052934292
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.847265625,
						"likelihood_diff_stderr,none": 0.17296880812920437,
						"pct_stereotype,none": 0.609375,
						"pct_stereotype_stderr,none": 0.02731662195498096
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.007523148148148,
						"likelihood_diff_stderr,none": 0.2667553611334916,
						"pct_stereotype,none": 0.5925925925925926,
						"pct_stereotype_stderr,none": 0.03350991604696042
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.557291666666667,
						"likelihood_diff_stderr,none": 0.3940218115340738,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.858021653543307,
						"likelihood_diff_stderr,none": 0.1703027146958789,
						"pct_stereotype,none": 0.5590551181102362,
						"pct_stereotype_stderr,none": 0.022050349996327274
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.233108108108108,
						"likelihood_diff_stderr,none": 0.38777874470979745,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860919
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.419354838709677,
						"likelihood_diff_stderr,none": 0.5159500332429978,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.034950731541029775
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.41578947368421,
						"likelihood_diff_stderr,none": 0.24696515681818743,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5024597495527727,
						"likelihood_diff_stderr,none": 0.08032792200248837,
						"pct_stereotype,none": 0.5867620751341681,
						"pct_stereotype_stderr,none": 0.012028018759276815
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.4569444444444444,
						"likelihood_diff_stderr,none": 0.31468903159630307,
						"pct_stereotype,none": 0.6444444444444445,
						"pct_stereotype_stderr,none": 0.05074011803597719
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.3846153846153846,
						"likelihood_diff_stderr,none": 1.100559074349997,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.325757575757576,
						"likelihood_diff_stderr,none": 0.5163122806246484,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.962227414330218,
						"likelihood_diff_stderr,none": 0.14115979159805578,
						"pct_stereotype,none": 0.632398753894081,
						"pct_stereotype_stderr,none": 0.02695311728071167
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.8048418972332017,
						"likelihood_diff_stderr,none": 0.21312960793191418,
						"pct_stereotype,none": 0.40711462450592883,
						"pct_stereotype_stderr,none": 0.030948774049323072
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4618055555555554,
						"likelihood_diff_stderr,none": 0.3932274528449492,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.3228260869565216,
						"likelihood_diff_stderr,none": 0.16341217666173166,
						"pct_stereotype,none": 0.4956521739130435,
						"pct_stereotype_stderr,none": 0.023337119039688343
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3065217391304347,
						"likelihood_diff_stderr,none": 0.2764784277831033,
						"pct_stereotype,none": 0.6956521739130435,
						"pct_stereotype_stderr,none": 0.043095185024639285
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.2472527472527473,
						"likelihood_diff_stderr,none": 0.31410897162379164,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.04284305206509431
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.12468112244898,
						"likelihood_diff_stderr,none": 0.25383493177294925,
						"pct_stereotype,none": 0.6938775510204082,
						"pct_stereotype_stderr,none": 0.033004389390311806
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.06594488188976377,
						"exact_match_stderr,none": 0.005507085737903648
					},
					"glue": {
						"acc,none": 0.7301149057803601,
						"acc_stderr,none": 0.04849063916526982,
						"alias": "glue",
						"f1,none": 0.7164277774601332,
						"f1_stderr,none": 0.00010767812473300899,
						"mcc,none": 0.16816948860566025,
						"mcc_stderr,none": 0.0009434042474712849
					},
					"hellaswag": {
						"acc,none": 0.5597490539733121,
						"acc_norm,none": 0.7471619199362677,
						"acc_norm_stderr,none": 0.004337506344899927,
						"acc_stderr,none": 0.004954026775425776,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.26260467802483406,
						"acc_norm,none": 0.26260467802483406,
						"acc_norm_stderr,none": 0.02512452642389409,
						"acc_stderr,none": 0.02512452642389409,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.01399667485179628,
						"acc_stderr,none": 0.01399667485179628,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291354,
						"acc_stderr,none": 0.014236526215291354,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.01371813351688892,
						"acc_stderr,none": 0.01371813351688892,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434956,
						"acc_stderr,none": 0.014221154708434956,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.01773156149490717,
						"acc_stderr,none": 0.01773156149490717,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877656,
						"acc_stderr,none": 0.013663187134877656,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.327,
						"acc_norm,none": 0.327,
						"acc_norm_stderr,none": 0.014842213153411242,
						"acc_stderr,none": 0.014842213153411242,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965126,
						"acc_stderr,none": 0.013895037677965126,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.030488073292114216,
						"acc_stderr,none": 0.030488073292114216,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296172,
						"acc_stderr,none": 0.014341711358296172,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.035218036253024915,
						"acc_stderr,none": 0.035218036253024915,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768077,
						"acc_stderr,none": 0.04408440022768077,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247124,
						"acc_stderr,none": 0.013414729030247124,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796275,
						"acc_stderr,none": 0.013996674851796275,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750636,
						"acc_stderr,none": 0.013626065817750636,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796273,
						"acc_stderr,none": 0.013996674851796273,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702301,
						"acc_stderr,none": 0.013681600278702301,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809947,
						"acc_stderr,none": 0.013963164754809947,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145158,
						"acc_stderr,none": 0.013979965645145158,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259719,
						"acc_stderr,none": 0.013929286594259719,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477341,
						"acc_stderr,none": 0.014385511563477341,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729472,
						"acc_stderr,none": 0.014013292702729472,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220069,
						"acc_stderr,none": 0.013374972519220069,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168544,
						"acc_stderr,none": 0.013772206565168544,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555954,
						"acc_stderr,none": 0.013550631705555954,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.01834559715276358,
						"acc_stderr,none": 0.01834559715276358,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920843,
						"acc_stderr,none": 0.013512312258920843,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750636,
						"acc_stderr,none": 0.013626065817750636,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259588,
						"acc_stderr,none": 0.014111099288259588,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314133,
						"acc_stderr,none": 0.013644675781314133,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729474,
						"acc_stderr,none": 0.014013292702729474,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168537,
						"acc_stderr,none": 0.013772206565168537,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.03048807329211421,
						"acc_stderr,none": 0.03048807329211421,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660007,
						"acc_stderr,none": 0.013394902889660007,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633915,
						"acc_stderr,none": 0.014046255632633915,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02936514188266332,
						"acc_stderr,none": 0.02936514188266332,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473477,
						"acc_stderr,none": 0.014746404865473477,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5860556895417671,
						"acc_norm,none": 0.538,
						"acc_norm_stderr,none": 0.0004981082164328662,
						"acc_stderr,none": 0.06319725476338278,
						"alias": "kobest",
						"f1,none": 0.5743844586651707,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6545584045584045,
						"acc_stderr,none": 0.012694999312376646,
						"alias": " - kobest_boolq",
						"f1,none": 0.6401436381460216,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.629,
						"acc_stderr,none": 0.015283736211823188,
						"alias": " - kobest_copa",
						"f1,none": 0.6279549253854075,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.43,
						"acc_norm,none": 0.538,
						"acc_norm_stderr,none": 0.022318338119870534,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4259777312531793,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.6851385390428212,
						"acc_stderr,none": 0.023340027251997023,
						"alias": " - kobest_sentineg",
						"f1,none": 0.6828120905319808,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.5063492063492063,
						"acc_stderr,none": 0.014090361048840493,
						"alias": " - kobest_wic",
						"f1,none": 0.48332195004891737,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7202600426935766,
						"acc_stderr,none": 0.014994411202277305,
						"alias": "lambada",
						"perplexity,none": 3.4552162142396714,
						"perplexity_stderr,none": 0.15310669389137432
					},
					"lambada_cloze": {
						"acc,none": 0.09965068891907626,
						"acc_stderr,none": 0.006811299711462279,
						"alias": "lambada_cloze",
						"perplexity,none": 148.41685558245132,
						"perplexity_stderr,none": 5.774887060775421
					},
					"lambada_multilingual": {
						"acc,none": 0.5522220065980982,
						"acc_stderr,none": 0.07700281923150025,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.08669925072525,
						"perplexity_stderr,none": 7.254542912950282
					},
					"lambada_openai": {
						"acc,none": 0.7471375897535416,
						"acc_stderr,none": 0.00605556266861039,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1799783556168073,
						"perplexity_stderr,none": 0.0619783536022585
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.08888026392392781,
						"acc_stderr,none": 0.003964628217610035,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 141.63695587736163,
						"perplexity_stderr,none": 4.585814311208377
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.44323694934989327,
						"acc_stderr,none": 0.006920942710141888,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31.710648242375186,
						"perplexity_stderr,none": 1.753081353919575
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7469435280419173,
						"acc_stderr,none": 0.006057099133599554,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1808042325052295,
						"perplexity_stderr,none": 0.062170445881232425
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4766155637492723,
						"acc_stderr,none": 0.006958355049604451,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.142654664716208,
						"perplexity_stderr,none": 1.224157821926803
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5647195808267029,
						"acc_stderr,none": 0.006907375433266108,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 15.137419373221363,
						"perplexity_stderr,none": 0.7335540297627436
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5295944110227052,
						"acc_stderr,none": 0.0069537649387797626,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.261969740808247,
						"perplexity_stderr,none": 1.068461374606832
					},
					"lambada_standard": {
						"acc,none": 0.6929943722103629,
						"acc_stderr,none": 0.006426138700468181,
						"alias": " - lambada_standard",
						"perplexity,none": 3.7295338838508907,
						"perplexity_stderr,none": 0.07353943134911345
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.11042111391422472,
						"acc_stderr,none": 0.004366468867623563,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 155.196755287541,
						"perplexity_stderr,none": 4.76313344579508
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.33587786259541985,
						"exact_match_stderr,get-answer": 0.011915892495388176
					},
					"logiqa": {
						"acc,none": 0.24423963133640553,
						"acc_norm,none": 0.3010752688172043,
						"acc_norm_stderr,none": 0.017992688742668232,
						"acc_stderr,none": 0.016851689430077556,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2582697201017812,
						"acc_norm,none": 0.2919847328244275,
						"acc_norm_stderr,none": 0.011471317249048263,
						"acc_stderr,none": 0.011042608058378034,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2556113902847571,
						"acc_norm,none": 0.25896147403685094,
						"acc_norm_stderr,none": 0.008019338828219917,
						"acc_stderr,none": 0.00798528739784743,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3457953823342512,
						"acc_stderr,none": 0.004895053002027348,
						"alias": "mc_taco",
						"f1,none": 0.5026970453264632,
						"f1_stderr,none": 0.0054999885741351595
					},
					"medmcqa": {
						"acc,none": 0.37532871145111163,
						"acc_norm,none": 0.37532871145111163,
						"acc_norm_stderr,none": 0.007487548564349056,
						"acc_stderr,none": 0.007487548564349056,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.39984289080911234,
						"acc_norm,none": 0.39984289080911234,
						"acc_norm_stderr,none": 0.013735156467071654,
						"acc_stderr,none": 0.013735156467071654,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4336989032901296,
						"acc_stderr,none": 0.0990729344620139,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.04318275491977976,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4342105263157895,
						"acc_stderr,none": 0.0403356566784832,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.47924528301886793,
						"acc_stderr,none": 0.030746349975723463,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04181210050035455,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.037143259063020656,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.04488482852329017,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3872340425531915,
						"acc_stderr,none": 0.03184389265339525,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.04303684033537315,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4827586206896552,
						"acc_stderr,none": 0.04164188720169377,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.32275132275132273,
						"acc_stderr,none": 0.024078943243597016,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147127,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.46774193548387094,
						"acc_stderr,none": 0.02838474778881333,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3694581280788177,
						"acc_stderr,none": 0.03395970381998574,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5878787878787879,
						"acc_stderr,none": 0.038435669935887165,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.035476014940069384,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6113989637305699,
						"acc_stderr,none": 0.03517739796373134,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.40512820512820513,
						"acc_stderr,none": 0.024890471769938145,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.0279404571362284,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.031566630992154156,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5486238532110091,
						"acc_stderr,none": 0.021335714711268782,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.029531221160930918,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5686274509803921,
						"acc_stderr,none": 0.034760990605016355,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.679324894514768,
						"acc_stderr,none": 0.03038193194999042,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.45739910313901344,
						"acc_stderr,none": 0.033435777055830646,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.549618320610687,
						"acc_stderr,none": 0.04363643698524779,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.40403825717322,
						"acc_stderr,none": 0.11040602815759773,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4793388429752066,
						"acc_stderr,none": 0.04560456086387235,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5462962962962963,
						"acc_stderr,none": 0.048129173245368216,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3987730061349693,
						"acc_stderr,none": 0.03847021420456023,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340457,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5436893203883495,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6709401709401709,
						"acc_stderr,none": 0.03078232157768817,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956911,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6040868454661558,
						"acc_stderr,none": 0.017488247006979273,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4508670520231214,
						"acc_stderr,none": 0.026788811931562753,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23128491620111732,
						"acc_stderr,none": 0.014102223623152594,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.46405228758169936,
						"acc_stderr,none": 0.028555827516528784,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.48793047956227875,
						"acc_stderr,none": 0.08230300413372368,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5209003215434084,
						"acc_stderr,none": 0.02837327096106942,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4876543209876543,
						"acc_stderr,none": 0.027812262269327242,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02812163604063989,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.33702737940026073,
						"acc_stderr,none": 0.01207283627369132,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.02989616303312547,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4477124183006536,
						"acc_stderr,none": 0.020116925347422425,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.04769300568972743,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.46122448979591835,
						"acc_stderr,none": 0.03191282052669277,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49041273968150795,
						"acc_stderr,none": 0.08156911679174601,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7164179104477612,
						"acc_stderr,none": 0.03187187537919795,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3691722169362512,
						"acc_stderr,none": 0.08145667568838086,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695237,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.037400593820293204,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.631578947368421,
						"acc_stderr,none": 0.036996580176568775,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7843097300050943,
						"acc_stderr,none": 0.004151796434806651,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7824450772986168,
						"acc_stderr,none": 0.0041611420395022795,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7132352941176471,
						"acc_stderr,none": 0.022417235676753935,
						"alias": "mrpc",
						"f1,none": 0.8251121076233184,
						"f1_stderr,none": 0.015910979964289588
					},
					"multimedqa": {
						"acc,none": 0.41022001419446413,
						"acc_norm,none": 0.3851143542470191,
						"acc_norm_stderr,none": 0.0001182188479162057,
						"acc_stderr,none": 0.0585200812055347,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5653877887788779,
						"acc_stderr,none": 0.007120125761242574,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7149172324022913,
						"mrr_stderr,none": 0.010310807940004274,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4040632054176072,
						"r@2_stderr,none": 0.016495030288906053
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6612114388302271,
						"mrr_stderr,none": 0.010439802804830492,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.01677371055764036
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.022109039310618556,
						"acc_stderr,none": 0.020629569998345414,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3975,
						"acc_stderr,none": 0.010945628277499656,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3795,
						"acc_stderr,none": 0.010853514379554374,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4085,
						"acc_stderr,none": 0.0109942854318084,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.011161621338114472,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5285,
						"acc_stderr,none": 0.011164954236428808,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5235,
						"acc_stderr,none": 0.011170777418517836,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4734999999999999,
						"acc_stderr,none": 0.057672983113210445,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7742110990206746,
						"acc_norm,none": 0.780739934711643,
						"acc_norm_stderr,none": 0.009653357463605329,
						"acc_stderr,none": 0.009754980670917325,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.27177625960717333,
						"acc_norm,none": 0.2952604611443211,
						"acc_norm_stderr,none": 0.003332653156350707,
						"acc_stderr,none": 0.0032502092833277874,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.02181430098478764,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7547764662970767,
						"acc_norm,none": 0.6207284168810908,
						"acc_norm_stderr,none": 0.008268959432667734,
						"acc_stderr,none": 0.14733170535246046,
						"alias": "pythia",
						"bits_per_byte,none": 0.6396265091587476,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5579257848582162,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1799783556168073,
						"perplexity_stderr,none": 0.0619783536022585,
						"word_perplexity,none": 10.70617463269285,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.36879432624113473,
						"acc_norm,none": 0.4219858156028369,
						"acc_norm_stderr,none": 0.05567886322575791,
						"acc_stderr,none": 0.04236144642383186,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.325,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03714454174077365,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3732394366197183,
						"acc_norm,none": 0.3873239436619718,
						"acc_norm_stderr,none": 0.02895738957595096,
						"acc_stderr,none": 0.02875089548898921,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877446,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7385110066782092,
						"acc_stderr,none": 0.002185539666277411,
						"alias": "qqp",
						"f1,none": 0.7148559715179631,
						"f1_stderr,none": 0.002656923338564827
					},
					"race": {
						"acc,none": 0.3559808612440191,
						"acc_stderr,none": 0.014818780400538124,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2565,
						"em_stderr,none": 0.00436722821343531,
						"f1,none": 0.26570190499722957,
						"f1_stderr,none": 0.004380566497613596
					},
					"rte": {
						"acc,none": 0.6967509025270758,
						"acc_stderr,none": 0.027668396293593706,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.927,
						"acc_norm,none": 0.931,
						"acc_norm_stderr,none": 0.008018934050315151,
						"acc_stderr,none": 0.00823035471524407,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6895306859205776,
						"acc_stderr,none": 0.027850410392630694,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8876146788990825,
						"acc_stderr,none": 0.01070182773009327,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5875737278816355,
						"acc_norm,none": 0.7760671798460462,
						"acc_norm_stderr,none": 0.0029474011971796087,
						"acc_stderr,none": 0.0034804473463639664,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6732221889454594,
						"acc_stderr,none": 0.08418482277632774,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6031650641025641,
						"acc_stderr,none": 0.00489657604164974,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8516266342353299,
						"acc_stderr,none": 0.003578755565041786,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5692156862745098,
						"acc_stderr,none": 0.004903312518256163,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34561677724581125,
						"acc_stderr,none": 0.0015002876271933555,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280903,
						"bleu_diff,none": -6.966932621926429,
						"bleu_diff_stderr,none": 0.824564524663069,
						"bleu_max,none": 25.595486127300646,
						"bleu_max_stderr,none": 0.7862675161693766,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -9.129749584829064,
						"rouge1_diff_stderr,none": 0.8843025304565784,
						"rouge1_max,none": 51.24160955366627,
						"rouge1_max_stderr,none": 0.8471568075415284,
						"rouge2_acc,none": 0.2582619339045288,
						"rouge2_acc_stderr,none": 0.015321821688476189,
						"rouge2_diff,none": -10.690393479392183,
						"rouge2_diff_stderr,none": 1.0773969464432287,
						"rouge2_max,none": 35.20192789830806,
						"rouge2_max_stderr,none": 0.9877722884597451,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -9.285548313850288,
						"rougeL_diff_stderr,none": 0.9013542036926124,
						"rougeL_max,none": 48.276105917652785,
						"rougeL_max_stderr,none": 0.8602935524860347
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280903,
						"bleu_diff,none": -6.966932621926429,
						"bleu_diff_stderr,none": 0.824564524663069,
						"bleu_max,none": 25.595486127300646,
						"bleu_max_stderr,none": 0.7862675161693766,
						"rouge1_acc,none": 0.2937576499388005,
						"rouge1_acc_stderr,none": 0.015945068581236614,
						"rouge1_diff,none": -9.129749584829064,
						"rouge1_diff_stderr,none": 0.8843025304565784,
						"rouge1_max,none": 51.24160955366627,
						"rouge1_max_stderr,none": 0.8471568075415284,
						"rouge2_acc,none": 0.2582619339045288,
						"rouge2_acc_stderr,none": 0.015321821688476189,
						"rouge2_diff,none": -10.690393479392183,
						"rouge2_diff_stderr,none": 1.0773969464432287,
						"rouge2_max,none": 35.20192789830806,
						"rouge2_max_stderr,none": 0.9877722884597451,
						"rougeL_acc,none": 0.29253365973072215,
						"rougeL_acc_stderr,none": 0.015925597445286165,
						"rougeL_diff,none": -9.285548313850288,
						"rougeL_diff_stderr,none": 0.9013542036926124,
						"rougeL_max,none": 48.276105917652785,
						"rougeL_max_stderr,none": 0.8602935524860347
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2741738066095471,
						"acc_stderr,none": 0.01561651849721937,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4170597478820754,
						"acc_stderr,none": 0.01426049278468305,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.06496062992125984,
						"exact_match_stderr,none": 0.005468712606129275
					},
					"wic": {
						"acc,none": 0.567398119122257,
						"acc_stderr,none": 0.019629915558485086,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6396265091587476,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5579257848582162,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.70617463269285,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7363851617995264,
						"acc_stderr,none": 0.012382849299658466,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.021217447349500138,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6230909090909091,
						"acc_stderr,none": 0.07193581940134663,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689253,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.02051442622562805,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.0195369235747476,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.02222133153414302,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.02138004238594606,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290774,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.020271503835075224,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.438714859437751,
						"acc_stderr,none": 0.04644427584995752,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463623,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47309236947791167,
						"acc_stderr,none": 0.010007549970702514,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4831325301204819,
						"acc_stderr,none": 0.010016368453021545,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.42168674698795183,
						"acc_stderr,none": 0.009898379493335446,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5369477911646586,
						"acc_stderr,none": 0.009994672360002297,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.46626506024096387,
						"acc_stderr,none": 0.00999923568472161,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955253,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42530120481927713,
						"acc_stderr,none": 0.009909597192221134,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.010015524156629813,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432365,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391928,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4502008032128514,
						"acc_stderr,none": 0.00997224029676889,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099365,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314561,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.344578313253012,
						"acc_stderr,none": 0.009525590900110653,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6332952289272606,
						"acc_stderr,none": 0.05426600506573858,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5962938451356717,
						"acc_stderr,none": 0.012626249735246581,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7729980145598941,
						"acc_stderr,none": 0.010779920137756038,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7213765718067505,
						"acc_stderr,none": 0.011537224908075912,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801905,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600926538716082,
						"acc_stderr,none": 0.0126022660051843,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6697551290536069,
						"acc_stderr,none": 0.012102848336416564,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5440105890138981,
						"acc_stderr,none": 0.012817182901076037,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6882859033752482,
						"acc_stderr,none": 0.011919943180399331,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279766,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5810721376571807,
						"acc_stderr,none": 0.012696855440486902,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6551952349437459,
						"acc_stderr,none": 0.01223160706088492,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8125421443020904,
						"acc_stderr,none": 0.03966825911669438,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8744086021505376,
						"acc_stderr,none": 0.006874151446168045,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.05221260262032129,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7330552659019812,
						"acc_stderr,none": 0.014292107806351878,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8212927756653993,
						"acc_stderr,none": 0.023668427798386103,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.02582169136035425,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.018824952299180426,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-E"
	},
	"./rwkv-x-dev/225-EagleX-PreFT-F": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.6183765501691093,
						"acc_norm_stderr,none": 0.07620513869563776,
						"acc_stderr,none": 0.104226283875455,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5096875,
						"acc_stderr,none": 0.05249191363131572,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.17025,
						"acc_stderr,none": 0.2358497454100739,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8272985074626865,
						"acc_stderr,none": 0.16191316138386289,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25780089153046054,
						"acc_norm,none": 0.25780089153046054,
						"acc_norm_stderr,none": 0.11450610305641491,
						"acc_stderr,none": 0.11450610305641491,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.28639267829390436,
						"acc_norm,none": 0.28639267829390436,
						"acc_norm_stderr,none": 0.048175945421278905,
						"acc_stderr,none": 0.048175945421278905,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.703264758497317,
						"likelihood_diff_stderr,none": 0.5409853721844432,
						"pct_stereotype,none": 0.610912343470483,
						"pct_stereotype_stderr,none": 0.07049261800655977
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"glue": {
						"acc,none": 0.7439256788947117,
						"acc_stderr,none": 0.004486672404747918,
						"alias": "glue",
						"f1,none": 0.7260010895764197,
						"f1_stderr,none": 7.71937501333283e-05,
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"kmmlu": {
						"acc,none": 0.2642795264221774,
						"acc_norm,none": 0.2642795264221774,
						"acc_norm_stderr,none": 0.02591801341423721,
						"acc_stderr,none": 0.02591801341423721,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5718044288533216,
						"acc_norm,none": 0.542,
						"acc_norm_stderr,none": 0.0004974669338677375,
						"acc_stderr,none": 0.06148063452478604,
						"alias": "kobest",
						"f1,none": 0.5488603643923043,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7265670483213662,
						"acc_stderr,none": 0.014492379122331596,
						"alias": "lambada",
						"perplexity,none": 3.385994977942082,
						"perplexity_stderr,none": 0.1478991564303529
					},
					"lambada_cloze": {
						"acc,none": 0.07704249951484572,
						"acc_stderr,none": 0.009576691634270237,
						"alias": "lambada_cloze",
						"perplexity,none": 173.4080263457022,
						"perplexity_stderr,none": 5.634216080918945
					},
					"lambada_multilingual": {
						"acc,none": 0.5541238113720163,
						"acc_stderr,none": 0.08228397169239214,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.83756081520726,
						"perplexity_stderr,none": 7.364939712028874
					},
					"mmlu": {
						"acc,none": 0.43804301381569577,
						"acc_stderr,none": 0.10033682218195927,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.4055260361317747,
						"acc_stderr,none": 0.1080409758096497,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.49468941100740266,
						"acc_stderr,none": 0.0866184181327758,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49821254468638293,
						"acc_stderr,none": 0.08443088573280304,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3720266412940057,
						"acc_stderr,none": 0.08276586285046318,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4107877927608233,
						"acc_norm,none": 0.3841349722778492,
						"acc_norm_stderr,none": 0.00010642961870287373,
						"acc_stderr,none": 0.05381311790660645,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4702857142857143,
						"acc_stderr,none": 0.05762477331460354,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.752440051486513,
						"acc_norm,none": 0.622951179453004,
						"acc_norm_stderr,none": 0.007880964636376735,
						"acc_stderr,none": 0.154441954088151,
						"alias": "pythia",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.123631827950768,
						"perplexity_stderr,none": 0.060781252986980915,
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3723404255319149,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.054361997567311325,
						"acc_stderr,none": 0.04760751849429055,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6703271105786829,
						"acc_stderr,none": 0.08862139099201637,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34013043916503455,
						"acc_stderr,none": 0.0015214412084548256,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.07228347229489104,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.04704126251631503,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.060562882816996615,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03811008839661942,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.6183765501691093,
						"acc_norm_stderr,none": 0.07620513869563776,
						"acc_stderr,none": 0.104226283875455,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5096875,
						"acc_stderr,none": 0.05249191363131572,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968526,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.014404353664908238,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41552901023890787,
						"acc_norm,none": 0.45819112627986347,
						"acc_norm_stderr,none": 0.0145602203087147,
						"acc_stderr,none": 0.014401366641216391,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7445286195286195,
						"acc_norm,none": 0.6973905723905723,
						"acc_norm_stderr,none": 0.009426434542371223,
						"acc_stderr,none": 0.008949113551665569,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.17025,
						"acc_stderr,none": 0.2358497454100739,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0955,
						"acc_stderr,none": 0.006573544001554181,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.010688902016257785,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.057,
						"acc_stderr,none": 0.005185455088247822,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.005891082449449557,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342205,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1595,
						"acc_stderr,none": 0.008189225036800002,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315688,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.06,
						"acc_stderr,none": 0.005311695308799959,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.043,
						"acc_stderr,none": 0.004537156917767922,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0017353579175704988,
						"acc_stderr,none": 0.0008671138796248289,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8272985074626865,
						"acc_stderr,none": 0.16191316138386289,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036385,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346936,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929341016,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406724,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844014,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660007,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295448,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036437,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163036,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030054,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.00612507277642612,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474916,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140913,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178326,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496501,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928366,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296183,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792965,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.01031821038094609,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910667,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.015605111967541947,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996672,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812192,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066534,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.01419015011761203,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703718,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832025,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745889,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704164,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220063,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792965,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.01540263747678436,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370508996,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.0156850572527172,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523712,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737239,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910606,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524298,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.737,
						"acc_stderr,none": 0.01392928659425972,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.01284137457209693,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427425,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783207,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469419,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307989,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298688998,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318223,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444231,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812192,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103322,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.278,
						"acc_stderr,none": 0.014174516461485251,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719118,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400246,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855752,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813806,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.015459721957493379,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.015499685165842596,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6697247706422018,
						"acc_stderr,none": 0.00822581091427727,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8928571428571429,
						"acc_stderr,none": 0.04170530058008159,
						"alias": "cb",
						"f1,none": 0.724616858237548,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25780089153046054,
						"acc_norm,none": 0.25780089153046054,
						"acc_norm_stderr,none": 0.11450610305641491,
						"acc_stderr,none": 0.11450610305641491,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206316,
						"acc_stderr,none": 0.062069005411206316,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222173,
						"acc_stderr,none": 0.07872958216222173,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915607,
						"acc_stderr,none": 0.06742861107915607,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217939,
						"acc_stderr,none": 0.07988892740217939,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387369,
						"acc_stderr,none": 0.11236664374387369,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.28639267829390436,
						"acc_norm,none": 0.28639267829390436,
						"acc_norm_stderr,none": 0.048175945421278905,
						"acc_stderr,none": 0.048175945421278905,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.03223012819451556,
						"acc_stderr,none": 0.03223012819451556,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316697,
						"acc_stderr,none": 0.04485760883316697,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2848297213622291,
						"acc_norm,none": 0.2848297213622291,
						"acc_norm_stderr,none": 0.025151821686179503,
						"acc_stderr,none": 0.025151821686179503,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923382,
						"acc_stderr,none": 0.030964517926923382,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.31843575418994413,
						"acc_norm,none": 0.31843575418994413,
						"acc_norm_stderr,none": 0.03491839802265681,
						"acc_stderr,none": 0.03491839802265681,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564394,
						"acc_stderr,none": 0.026350722655564394,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.0340105262010409,
						"acc_stderr,none": 0.0340105262010409,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.03680350371286462,
						"acc_stderr,none": 0.03680350371286462,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250621,
						"acc_stderr,none": 0.03451628876250621,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.02874673063268137,
						"acc_stderr,none": 0.02874673063268137,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.41596638655462187,
						"acc_norm,none": 0.41596638655462187,
						"acc_norm_stderr,none": 0.03201650100739615,
						"acc_stderr,none": 0.03201650100739615,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.029754528538233245,
						"acc_stderr,none": 0.029754528538233245,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335356,
						"acc_stderr,none": 0.03388193526335356,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698768,
						"acc_stderr,none": 0.03724517629698768,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3050847457627119,
						"acc_norm,none": 0.3050847457627119,
						"acc_norm_stderr,none": 0.04256799926288004,
						"acc_stderr,none": 0.04256799926288004,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.040061680838488774,
						"acc_stderr,none": 0.040061680838488774,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255952,
						"acc_stderr,none": 0.03492619473255952,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583285,
						"acc_stderr,none": 0.021605737836583285,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.37850467289719625,
						"acc_norm,none": 0.37850467289719625,
						"acc_norm_stderr,none": 0.033232633255714746,
						"acc_stderr,none": 0.033232633255714746,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.0404901546062249,
						"acc_stderr,none": 0.0404901546062249,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069252,
						"acc_stderr,none": 0.04075944659069252,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962465,
						"acc_stderr,none": 0.03224133248962465,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.034251778896020865,
						"acc_stderr,none": 0.034251778896020865,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3232758620689655,
						"acc_norm,none": 0.3232758620689655,
						"acc_norm_stderr,none": 0.03077417953179444,
						"acc_stderr,none": 0.03077417953179444,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2988505747126437,
						"acc_norm,none": 0.2988505747126437,
						"acc_norm_stderr,none": 0.03480240745663784,
						"acc_stderr,none": 0.03480240745663784,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.03117507071470539,
						"acc_stderr,none": 0.03117507071470539,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2670807453416149,
						"acc_norm,none": 0.2670807453416149,
						"acc_norm_stderr,none": 0.03497754822823695,
						"acc_stderr,none": 0.03497754822823695,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.703264758497317,
						"likelihood_diff_stderr,none": 0.5409853721844432,
						"pct_stereotype,none": 0.610912343470483,
						"pct_stereotype_stderr,none": 0.07049261800655977
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.9667561121049495,
						"likelihood_diff_stderr,none": 0.09361249437974535,
						"pct_stereotype,none": 0.6410256410256411,
						"pct_stereotype_stderr,none": 0.011717431086755268
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.115384615384615,
						"likelihood_diff_stderr,none": 0.3947690568502733,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.5,
						"likelihood_diff_stderr,none": 1.5840110192454184,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.0807692307692305,
						"likelihood_diff_stderr,none": 0.6057642704314616,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.80078125,
						"likelihood_diff_stderr,none": 0.16913795109224192,
						"pct_stereotype,none": 0.590625,
						"pct_stereotype_stderr,none": 0.027530952052640056
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.9265046296296298,
						"likelihood_diff_stderr,none": 0.2666658646626898,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.541666666666667,
						"likelihood_diff_stderr,none": 0.3820173497071937,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.826525590551181,
						"likelihood_diff_stderr,none": 0.1688152005376857,
						"pct_stereotype,none": 0.5511811023622047,
						"pct_stereotype_stderr,none": 0.022089136921635943
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.118243243243243,
						"likelihood_diff_stderr,none": 0.38606338486853864,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860919
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.369623655913978,
						"likelihood_diff_stderr,none": 0.5123279836325473,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.034950731541029775
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.431578947368421,
						"likelihood_diff_stderr,none": 0.24513424315741672,
						"pct_stereotype,none": 0.7052631578947368,
						"pct_stereotype_stderr,none": 0.033163618429842875
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.4354502087060226,
						"likelihood_diff_stderr,none": 0.08037615991389138,
						"pct_stereotype,none": 0.5813953488372093,
						"pct_stereotype_stderr,none": 0.012050381439304614
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3819444444444446,
						"likelihood_diff_stderr,none": 0.3136906848840893,
						"pct_stereotype,none": 0.6222222222222222,
						"pct_stereotype_stderr,none": 0.051392052067171366
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.4134615384615383,
						"likelihood_diff_stderr,none": 1.0231088533671597,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.238636363636363,
						"likelihood_diff_stderr,none": 0.5203446001072517,
						"pct_stereotype,none": 0.7424242424242424,
						"pct_stereotype_stderr,none": 0.054240275510565296
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.9458722741433023,
						"likelihood_diff_stderr,none": 0.1470494102912682,
						"pct_stereotype,none": 0.6137071651090342,
						"pct_stereotype_stderr,none": 0.027218484103343366
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.675395256916996,
						"likelihood_diff_stderr,none": 0.2095010549394445,
						"pct_stereotype,none": 0.4189723320158103,
						"pct_stereotype_stderr,none": 0.031080701217616472
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.486111111111111,
						"likelihood_diff_stderr,none": 0.38718748090043326,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.266304347826087,
						"likelihood_diff_stderr,none": 0.16316298389405196,
						"pct_stereotype,none": 0.48695652173913045,
						"pct_stereotype_stderr,none": 0.023330058952084724
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.2804347826086957,
						"likelihood_diff_stderr,none": 0.27139826065332456,
						"pct_stereotype,none": 0.7043478260869566,
						"pct_stereotype_stderr,none": 0.04273972288221525
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.17032967032967,
						"likelihood_diff_stderr,none": 0.3130719216034004,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.04284305206509431
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.998724489795918,
						"likelihood_diff_stderr,none": 0.25326554739376356,
						"pct_stereotype,none": 0.6887755102040817,
						"pct_stereotype_stderr,none": 0.03315571704943973
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"glue": {
						"acc,none": 0.7439256788947117,
						"acc_stderr,none": 0.004486672404747918,
						"alias": "glue",
						"f1,none": 0.7260010895764197,
						"f1_stderr,none": 7.71937501333283e-05,
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"hellaswag": {
						"acc,none": 0.5600477992431786,
						"acc_norm,none": 0.7492531368253336,
						"acc_norm_stderr,none": 0.004325572103753304,
						"acc_stderr,none": 0.0049536670286543846,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2642795264221774,
						"acc_norm,none": 0.2642795264221774,
						"acc_norm_stderr,none": 0.02591801341423721,
						"acc_stderr,none": 0.02591801341423721,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145144,
						"acc_stderr,none": 0.013979965645145144,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612035,
						"acc_stderr,none": 0.014190150117612035,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.013607356839598123,
						"acc_stderr,none": 0.013607356839598123,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291359,
						"acc_stderr,none": 0.014236526215291359,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24833333333333332,
						"acc_norm,none": 0.24833333333333332,
						"acc_norm_stderr,none": 0.017652927743333015,
						"acc_stderr,none": 0.017652927743333015,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.01360735683959812,
						"acc_stderr,none": 0.01360735683959812,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.329,
						"acc_norm,none": 0.329,
						"acc_norm_stderr,none": 0.014865395385928364,
						"acc_stderr,none": 0.014865395385928364,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145151,
						"acc_stderr,none": 0.013979965645145151,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220487,
						"acc_stderr,none": 0.014484778521220487,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.035218036253024915,
						"acc_stderr,none": 0.035218036253024915,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750636,
						"acc_stderr,none": 0.013626065817750636,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296176,
						"acc_stderr,none": 0.014341711358296176,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314137,
						"acc_stderr,none": 0.013644675781314137,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462625,
						"acc_stderr,none": 0.014078856992462625,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234183,
						"acc_stderr,none": 0.013807775152234183,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259734,
						"acc_stderr,none": 0.013929286594259734,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774162,
						"acc_stderr,none": 0.013877773329774162,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909282,
						"acc_stderr,none": 0.04292346959909282,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.01456664639466439,
						"acc_stderr,none": 0.01456664639466439,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440472,
						"acc_stderr,none": 0.013946271849440472,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281562,
						"acc_stderr,none": 0.013354937452281562,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881428,
						"acc_stderr,none": 0.013588548437881428,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2816666666666667,
						"acc_norm,none": 0.2816666666666667,
						"acc_norm_stderr,none": 0.018378807365901532,
						"acc_stderr,none": 0.018378807365901532,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555951,
						"acc_stderr,none": 0.013550631705555951,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633918,
						"acc_stderr,none": 0.014046255632633918,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440476,
						"acc_stderr,none": 0.013946271849440476,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633911,
						"acc_stderr,none": 0.014046255632633911,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.030488073292114205,
						"acc_stderr,none": 0.030488073292114205,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.01351231225892084,
						"acc_stderr,none": 0.01351231225892084,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986189,
						"acc_stderr,none": 0.014062601350986189,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473475,
						"acc_stderr,none": 0.014746404865473475,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5718044288533216,
						"acc_norm,none": 0.542,
						"acc_norm_stderr,none": 0.0004974669338677375,
						"acc_stderr,none": 0.06148063452478604,
						"alias": "kobest",
						"f1,none": 0.5488603643923043,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6061253561253561,
						"acc_stderr,none": 0.013044619102053259,
						"alias": " - kobest_boolq",
						"f1,none": 0.555215396962977,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171745,
						"alias": " - kobest_copa",
						"f1,none": 0.6321167122260547,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.428,
						"acc_norm,none": 0.542,
						"acc_norm_stderr,none": 0.02230396677426994,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.424598633811921,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.707808564231738,
						"acc_stderr,none": 0.02285304394949245,
						"alias": " - kobest_sentineg",
						"f1,none": 0.6997600792781515,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4992063492063492,
						"acc_stderr,none": 0.014091479467428242,
						"alias": " - kobest_wic",
						"f1,none": 0.47746740004114224,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7265670483213662,
						"acc_stderr,none": 0.014492379122331596,
						"alias": "lambada",
						"perplexity,none": 3.385994977942082,
						"perplexity_stderr,none": 0.1478991564303529
					},
					"lambada_cloze": {
						"acc,none": 0.07704249951484572,
						"acc_stderr,none": 0.009576691634270237,
						"alias": "lambada_cloze",
						"perplexity,none": 173.4080263457022,
						"perplexity_stderr,none": 5.634216080918945
					},
					"lambada_multilingual": {
						"acc,none": 0.5541238113720163,
						"acc_stderr,none": 0.08228397169239214,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.83756081520726,
						"perplexity_stderr,none": 7.364939712028874
					},
					"lambada_openai": {
						"acc,none": 0.7502425771395304,
						"acc_stderr,none": 0.006030761152855774,
						"alias": " - lambada_openai",
						"perplexity,none": 3.123631827950768,
						"perplexity_stderr,none": 0.060781252986980915
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.05938288375703474,
						"acc_stderr,none": 0.003292677177568096,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 175.21034051964565,
						"perplexity_stderr,none": 5.7849360190150705
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.44323694934989327,
						"acc_stderr,none": 0.006920942710141903,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31.36961678642296,
						"perplexity_stderr,none": 1.7449488724394175
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7527653793906462,
						"acc_stderr,none": 0.006010305315759311,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.122154236418865,
						"perplexity_stderr,none": 0.06069446081445905
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4737046380749078,
						"acc_stderr,none": 0.00695633779153668,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.042039621913865,
						"perplexity_stderr,none": 1.2279515491112283
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5666601979429459,
						"acc_stderr,none": 0.0069037923068605445,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 14.869726239634026,
						"perplexity_stderr,none": 0.7195204040047735
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5342518921016883,
						"acc_stderr,none": 0.006949613576318102,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 19.784267191646578,
						"perplexity_stderr,none": 1.0457785188460769
					},
					"lambada_standard": {
						"acc,none": 0.7003687172520862,
						"acc_stderr,none": 0.006382179569794072,
						"alias": " - lambada_standard",
						"perplexity,none": 3.6502116742417114,
						"perplexity_stderr,none": 0.07182051048711009
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0947021152726567,
						"acc_stderr,none": 0.0040793189739294095,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 171.60571217175877,
						"perplexity_stderr,none": 5.329631795404243
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.34478371501272265,
						"exact_match_stderr,get-answer": 0.011991613472848755
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.30261136712749614,
						"acc_norm_stderr,none": 0.01801869659815883,
						"acc_stderr,none": 0.016887410894296934,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.26208651399491095,
						"acc_norm,none": 0.28880407124681934,
						"acc_norm_stderr,none": 0.011434263441269486,
						"acc_stderr,none": 0.011095246835491722,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2592964824120603,
						"acc_norm,none": 0.26164154103852594,
						"acc_norm_stderr,none": 0.008046139671905343,
						"acc_stderr,none": 0.008022710238105768,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3419826307985596,
						"acc_stderr,none": 0.004882156585093113,
						"alias": "mc_taco",
						"f1,none": 0.5049007889074827,
						"f1_stderr,none": 0.0054687066918815905
					},
					"medmcqa": {
						"acc,none": 0.38130528328950514,
						"acc_norm,none": 0.38130528328950514,
						"acc_norm_stderr,none": 0.007510737797531824,
						"acc_stderr,none": 0.007510737797531824,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3904163393558523,
						"acc_norm,none": 0.3904163393558523,
						"acc_norm_stderr,none": 0.01367845656474356,
						"acc_stderr,none": 0.01367845656474356,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.43804301381569577,
						"acc_stderr,none": 0.10033682218195927,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4962962962962963,
						"acc_stderr,none": 0.04319223625811331,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4276315789473684,
						"acc_stderr,none": 0.040260970832965585,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4867924528301887,
						"acc_stderr,none": 0.030762134874500476,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04181210050035455,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411019,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.03758517775404947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.044405219061793275,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3829787234042553,
						"acc_stderr,none": 0.03177821250236922,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022057,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4896551724137931,
						"acc_stderr,none": 0.04165774775728763,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.023809523809523864,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147127,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4870967741935484,
						"acc_stderr,none": 0.028434533152681855,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.35467980295566504,
						"acc_stderr,none": 0.03366124489051449,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.04999999999999999,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5636363636363636,
						"acc_stderr,none": 0.03872592983524754,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4797979797979798,
						"acc_stderr,none": 0.035594435655639196,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6269430051813472,
						"acc_stderr,none": 0.034902055920485744,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.41794871794871796,
						"acc_stderr,none": 0.025007329882461217,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.027634907264178544,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3949579831932773,
						"acc_stderr,none": 0.03175367846096625,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.03631329803969653,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5614678899082569,
						"acc_stderr,none": 0.021274713073954562,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.030225226160012404,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5637254901960784,
						"acc_stderr,none": 0.03480693138457039,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.03068582059661081,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.45739910313901344,
						"acc_stderr,none": 0.03343577705583065,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5572519083969466,
						"acc_stderr,none": 0.04356447202665069,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.4055260361317747,
						"acc_stderr,none": 0.1080409758096497,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4628099173553719,
						"acc_stderr,none": 0.04551711196104218,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.04820403072760627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4233128834355828,
						"acc_stderr,none": 0.03881891213334383,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841043,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5339805825242718,
						"acc_stderr,none": 0.0493929144727348,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7008547008547008,
						"acc_stderr,none": 0.029996951858349476,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6079182630906769,
						"acc_stderr,none": 0.017458524050147632,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.44508670520231214,
						"acc_stderr,none": 0.026756255129663772,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21787709497206703,
						"acc_stderr,none": 0.013806211780732984,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.47058823529411764,
						"acc_stderr,none": 0.028580341065138293,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.49468941100740266,
						"acc_stderr,none": 0.0866184181327758,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5273311897106109,
						"acc_stderr,none": 0.02835563356832818,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.49382716049382713,
						"acc_stderr,none": 0.027818623962583295,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02812163604063989,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3533246414602347,
						"acc_stderr,none": 0.012208408211082433,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.43014705882352944,
						"acc_stderr,none": 0.030074971917302875,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.020130388312904524,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4909090909090909,
						"acc_stderr,none": 0.04788339768702861,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4326530612244898,
						"acc_stderr,none": 0.03171752824062664,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49821254468638293,
						"acc_stderr,none": 0.08443088573280304,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7313432835820896,
						"acc_stderr,none": 0.031343283582089536,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3720266412940057,
						"acc_stderr,none": 0.08276586285046318,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.04725815626252607,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6257309941520468,
						"acc_stderr,none": 0.03711601185389481,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7883851248089658,
						"acc_stderr,none": 0.0041230564433915855,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7839707078925956,
						"acc_stderr,none": 0.004150566641327966,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6911764705882353,
						"acc_stderr,none": 0.022900895184021622,
						"alias": "mrpc",
						"f1,none": 0.8141592920353983,
						"f1_stderr,none": 0.016257743281071716
					},
					"multimedqa": {
						"acc,none": 0.4107877927608233,
						"acc_norm,none": 0.3841349722778492,
						"acc_norm_stderr,none": 0.00010642961870287373,
						"acc_stderr,none": 0.05381311790660645,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5622937293729373,
						"acc_stderr,none": 0.007125847019547095,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.713036118827878,
						"mrr_stderr,none": 0.010307676076689035,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.40632054176072235,
						"r@2_stderr,none": 0.01650968416729844
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6607411603693499,
						"mrr_stderr,none": 0.010435694842950418,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4729119638826185,
						"r@2_stderr,none": 0.01678263288163964
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3945,
						"acc_stderr,none": 0.010931359582007931,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.010842308463902531,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404668,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5445,
						"acc_stderr,none": 0.011138757154883975,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5185,
						"acc_stderr,none": 0.011175478542788579,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5235,
						"acc_stderr,none": 0.011170777418517835,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5295,
						"acc_stderr,none": 0.011163654804511657,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4702857142857143,
						"acc_stderr,none": 0.05762477331460354,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7763873775843307,
						"acc_norm,none": 0.7829162132752993,
						"acc_norm_stderr,none": 0.009618708415756778,
						"acc_stderr,none": 0.009721489519176299,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26211571306575576,
						"acc_norm,none": 0.2941929974380871,
						"acc_norm_stderr,none": 0.003329141835183525,
						"acc_stderr,none": 0.0032130228239864067,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.752440051486513,
						"acc_norm,none": 0.622951179453004,
						"acc_norm_stderr,none": 0.007880964636376735,
						"acc_stderr,none": 0.154441954088151,
						"alias": "pythia",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.123631827950768,
						"perplexity_stderr,none": 0.060781252986980915,
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3723404255319149,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.054361997567311325,
						"acc_stderr,none": 0.04760751849429055,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.04552192400253557,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.03885143449429052,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3732394366197183,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064897,
						"acc_stderr,none": 0.028750895488989205,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877446,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7544892406628741,
						"acc_stderr,none": 0.00214050028784654,
						"alias": "qqp",
						"f1,none": 0.7246296399045664,
						"f1_stderr,none": 0.002653546785737591
					},
					"race": {
						"acc,none": 0.3464114832535885,
						"acc_stderr,none": 0.014726451021782803,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2626,
						"em_stderr,none": 0.004400688651342955,
						"f1,none": 0.2723752383440733,
						"f1_stderr,none": 0.004413449576815405
					},
					"rte": {
						"acc,none": 0.7003610108303249,
						"acc_stderr,none": 0.02757437014529261,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.93,
						"acc_norm,none": 0.931,
						"acc_norm_stderr,none": 0.008018934050315153,
						"acc_stderr,none": 0.008072494358323495,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6931407942238267,
						"acc_stderr,none": 0.027760403038058965,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9048165137614679,
						"acc_stderr,none": 0.009943790947096227,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5905228431470558,
						"acc_norm,none": 0.7783664900529841,
						"acc_norm_stderr,none": 0.0029365709592193297,
						"acc_stderr,none": 0.003476673543589847,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6703271105786829,
						"acc_stderr,none": 0.08862139099201637,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5843349358974359,
						"acc_stderr,none": 0.0049325574571340396,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8587209891557718,
						"acc_stderr,none": 0.003506665223133951,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5722549019607843,
						"acc_stderr,none": 0.004899011799705543,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34013043916503455,
						"acc_stderr,none": 0.0015214412084548256,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"truthfulqa_mc1": {
						"acc,none": 0.26805385556915545,
						"acc_stderr,none": 0.015506204722834553,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4122070227609136,
						"acc_stderr,none": 0.014268999975578912,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"wic": {
						"acc,none": 0.5219435736677116,
						"acc_stderr,none": 0.019791633564310455,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7332280978689818,
						"acc_stderr,none": 0.012430046102144337,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.059755502635482904,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.04744733393277919,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.021217447349500148,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.07228347229489104,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.02175082059125084,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.02059164957122493,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.0195369235747476,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.02138004238594606,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.020186703693570847,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.04704126251631503,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4755020080321285,
						"acc_stderr,none": 0.010010036112667854,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829975,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.4108433734939759,
						"acc_stderr,none": 0.009861456841490835,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5353413654618474,
						"acc_stderr,none": 0.009997006138567242,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4682730923694779,
						"acc_stderr,none": 0.010001876146466708,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42771084337349397,
						"acc_stderr,none": 0.009916774564942348,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4843373493975904,
						"acc_stderr,none": 0.010017154458106753,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.00987847434182292,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42610441767068274,
						"acc_stderr,none": 0.00991201637745907,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4562248995983936,
						"acc_stderr,none": 0.00998358919769393,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41285140562248995,
						"acc_stderr,none": 0.00986866594308441,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568397,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.060562882816996615,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040303,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.771674387822634,
						"acc_stderr,none": 0.010802042577302285,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7233620119126406,
						"acc_stderr,none": 0.011511854288593795,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801903,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703514,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6737260092653872,
						"acc_stderr,none": 0.012065474625979069,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245275,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.686962276637988,
						"acc_stderr,none": 0.011933732786576634,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.01277651858633279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607803,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6499007279947054,
						"acc_stderr,none": 0.012275258369751086,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03811008839661942,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8761290322580645,
						"acc_stderr,none": 0.00683361864926894,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7372262773722628,
						"acc_stderr,none": 0.01422029531609415,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.02597659935230537,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7797619047619048,
						"acc_stderr,none": 0.018477501049056294,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/225-EagleX-PreFT-F"
	},
	"./rwkv-x-dev/Eagle-225-1FT": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6428974069898534,
						"acc_norm,none": 0.6358511837655016,
						"acc_norm_stderr,none": 0.08472093374268207,
						"acc_stderr,none": 0.1079146112825111,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4734375,
						"acc_stderr,none": 0.056648696121819386,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.20335181992639742,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8224776119402986,
						"acc_stderr,none": 0.1616319876306409,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2763744427934622,
						"acc_norm,none": 0.2763744427934622,
						"acc_norm_stderr,none": 0.12093283782238075,
						"acc_stderr,none": 0.12093283782238075,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3006389224658954,
						"acc_norm,none": 0.3006389224658954,
						"acc_norm_stderr,none": 0.056411844725681275,
						"acc_stderr,none": 0.056411844725681275,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373173822301728,
						"likelihood_diff_stderr,none": 0.5327162538113843,
						"pct_stereotype,none": 0.6177698270721528,
						"pct_stereotype_stderr,none": 0.06760742841256165
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12450787401574803,
						"exact_match_stderr,none": 0.007326044786419023
					},
					"glue": {
						"acc,none": 0.7468437351119581,
						"acc_stderr,none": 0.004647103645416364,
						"alias": "glue",
						"f1,none": 0.688406848367625,
						"f1_stderr,none": 0.00015325048490247542,
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"kmmlu": {
						"acc,none": 0.25350851862546925,
						"acc_norm,none": 0.25350851862546925,
						"acc_norm_stderr,none": 0.022762737360912742,
						"acc_stderr,none": 0.022762737360912742,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5608419206314406,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.05413823602594502,
						"alias": "kobest",
						"f1,none": 0.5168425089886489,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7163788084610906,
						"acc_stderr,none": 0.021918831653687878,
						"alias": "lambada",
						"perplexity,none": 3.6144469777749686,
						"perplexity_stderr,none": 0.2463954915414085
					},
					"lambada_cloze": {
						"acc,none": 0.0361925092179313,
						"acc_stderr,none": 0.00316071616136545,
						"alias": "lambada_cloze",
						"perplexity,none": 521.6564771337919,
						"perplexity_stderr,none": 93.6747242720253
					},
					"lambada_multilingual": {
						"acc,none": 0.5439937900252281,
						"acc_stderr,none": 0.08772152576468445,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.904988395755094,
						"perplexity_stderr,none": 8.280704599420616
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09027862756450644,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.39064824654622743,
						"acc_stderr,none": 0.096048292049521,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.46057289990344386,
						"acc_stderr,none": 0.08539426945163622,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46376340591485216,
						"acc_stderr,none": 0.07434625886544384,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07043041013199013,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3873669268985096,
						"acc_norm,none": 0.36050234133502,
						"acc_norm_stderr,none": 0.00011154735371387975,
						"acc_stderr,none": 0.08174974322194567,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48542857142857143,
						"acc_stderr,none": 0.0533488017756697,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7466147264761285,
						"acc_norm,none": 0.6398228883396612,
						"acc_norm_stderr,none": 0.00931138936130364,
						"acc_stderr,none": 0.15323417083329235,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339162169534232,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517715883768304,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1430043279833377,
						"perplexity_stderr,none": 0.06078958209458802,
						"word_perplexity,none": 10.481953286429421,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.05921656033438769,
						"acc_stderr,none": 0.044889894198871454,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6748194735616119,
						"acc_stderr,none": 0.06325286648771192,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34066057096857333,
						"acc_stderr,none": 0.0014986143632334042,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.043849772068066,
						"bleu_diff_stderr,none": 0.8765634826745657,
						"bleu_max,none": 27.163948579181803,
						"bleu_max_stderr,none": 0.7931898909880212,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -7.609244770198295,
						"rouge1_diff_stderr,none": 0.9643413459782677,
						"rouge1_max,none": 52.933340683269385,
						"rouge1_max_stderr,none": 0.8471478288298345,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024643,
						"rouge2_diff,none": -9.038968149823395,
						"rouge2_diff_stderr,none": 1.1549083395656679,
						"rouge2_max,none": 37.32845620528283,
						"rouge2_max_stderr,none": 0.9968245606197573,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768545,
						"rougeL_diff,none": -7.8440069575113505,
						"rougeL_diff_stderr,none": 0.9886490732442875,
						"rougeL_max,none": 49.95832859521396,
						"rougeL_max_stderr,none": 0.8649734725042382
					},
					"xcopa": {
						"acc,none": 0.6201818181818182,
						"acc_stderr,none": 0.06451327389540393,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43180722891566264,
						"acc_stderr,none": 0.046774996360704875,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6312496239696769,
						"acc_stderr,none": 0.055551939219346536,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.040190973040705194,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6428974069898534,
						"acc_norm,none": 0.6358511837655016,
						"acc_norm_stderr,none": 0.08472093374268207,
						"acc_stderr,none": 0.1079146112825111,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4734375,
						"acc_stderr,none": 0.056648696121819386,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.428,
						"acc_stderr,none": 0.015654426245029284,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.41333333333333333,
						"acc_stderr,none": 0.014221202817696512,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41467576791808874,
						"acc_norm,none": 0.45733788395904434,
						"acc_norm_stderr,none": 0.014558106543924067,
						"acc_stderr,none": 0.014397070564409174,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7554713804713805,
						"acc_norm,none": 0.7239057239057239,
						"acc_norm_stderr,none": 0.009173559873835266,
						"acc_stderr,none": 0.008819461106822597,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.20335181992639742,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.4575,
						"acc_stderr,none": 0.011142663706548622,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.009658915432058835,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.249,
						"acc_stderr,none": 0.009671932233869848,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9955,
						"acc_stderr,none": 0.0014969954902233175,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.292,
						"acc_stderr,none": 0.010169548163754639,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.010961732517713438,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.271,
						"acc_stderr,none": 0.00994127328148805,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.4215,
						"acc_stderr,none": 0.01104444950789628,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.289,
						"acc_stderr,none": 0.010138584489351777,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.2315,
						"acc_stderr,none": 0.0094338949637514,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8224776119402986,
						"acc_stderr,none": 0.1616319876306409,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696844,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503006,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329813,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557415,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659978,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.014526080235459544,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244087,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151075,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256581,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639227,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081349,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897885,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697589,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406723,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896890894,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179494,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.014965960710224489,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361425,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919289,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139723,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306475,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.015790799515836763,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333449,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541655,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.589,
						"acc_stderr,none": 0.015566673418599275,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296186,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523708,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139981,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499666,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085343,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557424,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.275,
						"acc_stderr,none": 0.014127086556490531,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.651,
						"acc_stderr,none": 0.015080663991563093,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498325,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366644,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175118,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.01039129342184988,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.00868051561552373,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499347,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578213,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766274,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389626,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703718,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234187,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655163,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140933,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785134,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426566,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.583,
						"acc_stderr,none": 0.015599819048769618,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.015780495050030166,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118747,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.615,
						"acc_stderr,none": 0.015395194445410806,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098694,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787728,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.297,
						"acc_stderr,none": 0.014456832294801103,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177897,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491104,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632154,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099202,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697589,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.015395194445410808,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.0153801023256527,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7180428134556575,
						"acc_stderr,none": 0.00786972023868448,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.04718416136255828,
						"alias": "cb",
						"f1,none": 0.7009189640768588,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2763744427934622,
						"acc_norm,none": 0.2763744427934622,
						"acc_norm_stderr,none": 0.12093283782238075,
						"acc_stderr,none": 0.12093283782238075,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3006389224658954,
						"acc_norm,none": 0.3006389224658954,
						"acc_norm_stderr,none": 0.056411844725681275,
						"acc_stderr,none": 0.056411844725681275,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745934,
						"acc_stderr,none": 0.03283906353745934,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.040718744426068945,
						"acc_stderr,none": 0.040718744426068945,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3126934984520124,
						"acc_norm,none": 0.3126934984520124,
						"acc_norm_stderr,none": 0.02583489590078724,
						"acc_stderr,none": 0.02583489590078724,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3687150837988827,
						"acc_norm,none": 0.3687150837988827,
						"acc_norm_stderr,none": 0.036161643250458134,
						"acc_stderr,none": 0.036161643250458134,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189597,
						"acc_stderr,none": 0.04794743635189597,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614584,
						"acc_stderr,none": 0.04429811949614584,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.02671881407296754,
						"acc_stderr,none": 0.02671881407296754,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.03296245110172229,
						"acc_stderr,none": 0.03296245110172229,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.27485380116959063,
						"acc_norm,none": 0.27485380116959063,
						"acc_norm_stderr,none": 0.034240429246915824,
						"acc_stderr,none": 0.034240429246915824,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3270440251572327,
						"acc_norm,none": 0.3270440251572327,
						"acc_norm_stderr,none": 0.0373222564649312,
						"acc_stderr,none": 0.0373222564649312,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3558282208588957,
						"acc_norm,none": 0.3558282208588957,
						"acc_norm_stderr,none": 0.03761521380046734,
						"acc_stderr,none": 0.03761521380046734,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.032145368597886394,
						"acc_stderr,none": 0.032145368597886394,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.25217391304347825,
						"acc_norm,none": 0.25217391304347825,
						"acc_norm_stderr,none": 0.02869674529449336,
						"acc_stderr,none": 0.02869674529449336,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395385,
						"acc_stderr,none": 0.03242041613395385,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.03644669348694787,
						"acc_stderr,none": 0.03644669348694787,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.02192746197287115,
						"acc_stderr,none": 0.02192746197287115,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.033921125520669684,
						"acc_stderr,none": 0.033921125520669684,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.032833210696431546,
						"acc_stderr,none": 0.032833210696431546,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.03464390125743289,
						"acc_stderr,none": 0.03464390125743289,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752742,
						"acc_stderr,none": 0.03424737867752742,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426438,
						"acc_stderr,none": 0.022743327388426438,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.35344827586206895,
						"acc_norm,none": 0.35344827586206895,
						"acc_norm_stderr,none": 0.031452746950022696,
						"acc_stderr,none": 0.031452746950022696,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326118,
						"acc_stderr,none": 0.031061820840326118,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.03663974994391242,
						"acc_stderr,none": 0.03663974994391242,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.03616379286462019,
						"acc_stderr,none": 0.03616379286462019,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.036955560385363254,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373173822301728,
						"likelihood_diff_stderr,none": 0.5327162538113843,
						"pct_stereotype,none": 0.6177698270721528,
						"pct_stereotype_stderr,none": 0.06760742841256165
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.9213625521765056,
						"likelihood_diff_stderr,none": 0.09225031794556265,
						"pct_stereotype,none": 0.6499701848539058,
						"pct_stereotype_stderr,none": 0.011650973912575054
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.041208791208791,
						"likelihood_diff_stderr,none": 0.4121368001118892,
						"pct_stereotype,none": 0.6593406593406593,
						"pct_stereotype_stderr,none": 0.04995670951276871
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.715909090909091,
						"likelihood_diff_stderr,none": 1.4542346969175948,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.061538461538461,
						"likelihood_diff_stderr,none": 0.6177227965700853,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.837109375,
						"likelihood_diff_stderr,none": 0.16662718987982708,
						"pct_stereotype,none": 0.61875,
						"pct_stereotype_stderr,none": 0.027193630402775476
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.881365740740741,
						"likelihood_diff_stderr,none": 0.25948861754988745,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.479166666666667,
						"likelihood_diff_stderr,none": 0.390241889594603,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.80880905511811,
						"likelihood_diff_stderr,none": 0.16711400721430109,
						"pct_stereotype,none": 0.5748031496062992,
						"pct_stereotype_stderr,none": 0.021955867910832084
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.9436936936936937,
						"likelihood_diff_stderr,none": 0.3590903668024958,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532051
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.125,
						"likelihood_diff_stderr,none": 0.4815282425157905,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364003
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.345394736842105,
						"likelihood_diff_stderr,none": 0.2515012898291121,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3508124627310676,
						"likelihood_diff_stderr,none": 0.07852461531730219,
						"pct_stereotype,none": 0.5855694692903995,
						"pct_stereotype_stderr,none": 0.012033115254328987
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.6694444444444443,
						"likelihood_diff_stderr,none": 0.3211448528423643,
						"pct_stereotype,none": 0.5888888888888889,
						"pct_stereotype_stderr,none": 0.052155640611075534
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.8653846153846154,
						"likelihood_diff_stderr,none": 0.8532626789043604,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.901515151515151,
						"likelihood_diff_stderr,none": 0.4746142706878124,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.056159743502623156
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.769470404984424,
						"likelihood_diff_stderr,none": 0.13327267141158228,
						"pct_stereotype,none": 0.6230529595015576,
						"pct_stereotype_stderr,none": 0.02709116375533661
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4970355731225298,
						"likelihood_diff_stderr,none": 0.20228846624438102,
						"pct_stereotype,none": 0.4268774703557312,
						"pct_stereotype_stderr,none": 0.031158395621279214
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4479166666666665,
						"likelihood_diff_stderr,none": 0.4341670355625479,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.135869565217391,
						"likelihood_diff_stderr,none": 0.1600658366042133,
						"pct_stereotype,none": 0.48043478260869565,
						"pct_stereotype_stderr,none": 0.02332012708760827
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.1847826086956523,
						"likelihood_diff_stderr,none": 0.27422811296045174,
						"pct_stereotype,none": 0.7652173913043478,
						"pct_stereotype_stderr,none": 0.039698395317531235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.381868131868132,
						"likelihood_diff_stderr,none": 0.31734409561304555,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.055803571428571,
						"likelihood_diff_stderr,none": 0.24925499562442802,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.03252156607969809
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.12450787401574803,
						"exact_match_stderr,none": 0.007326044786419023
					},
					"glue": {
						"acc,none": 0.7468437351119581,
						"acc_stderr,none": 0.004647103645416364,
						"alias": "glue",
						"f1,none": 0.688406848367625,
						"f1_stderr,none": 0.00015325048490247542,
						"mcc,none": 0.1681096950363583,
						"mcc_stderr,none": 0.023633129903434982
					},
					"hellaswag": {
						"acc,none": 0.5533758215494922,
						"acc_norm,none": 0.7428799044015136,
						"acc_norm_stderr,none": 0.004361529679492745,
						"acc_stderr,none": 0.004961268387512964,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.25350851862546925,
						"acc_norm,none": 0.25350851862546925,
						"acc_norm_stderr,none": 0.022762737360912742,
						"acc_stderr,none": 0.022762737360912742,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936431,
						"acc_stderr,none": 0.013334797216936431,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888914,
						"acc_stderr,none": 0.013718133516888914,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796285,
						"acc_stderr,none": 0.013996674851796285,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877654,
						"acc_stderr,none": 0.013663187134877654,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.017366497958564618,
						"acc_stderr,none": 0.017366497958564618,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660007,
						"acc_stderr,none": 0.013394902889660007,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.01444273494157502,
						"acc_stderr,none": 0.01444273494157502,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920838,
						"acc_stderr,none": 0.013512312258920838,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.031652557907861936,
						"acc_stderr,none": 0.031652557907861936,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717598,
						"acc_stderr,none": 0.014095022868717598,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542127,
						"acc_stderr,none": 0.04512608598542127,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.01355063170555596,
						"acc_stderr,none": 0.01355063170555596,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314147,
						"acc_stderr,none": 0.013644675781314147,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.013084731950262026,
						"acc_stderr,none": 0.013084731950262026,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234197,
						"acc_stderr,none": 0.013807775152234197,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.01362606581775064,
						"acc_stderr,none": 0.01362606581775064,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872844,
						"acc_stderr,none": 0.013790038620872844,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.01371813351688892,
						"acc_stderr,none": 0.01371813351688892,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542126,
						"acc_stderr,none": 0.04512608598542126,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.01351231225892084,
						"acc_stderr,none": 0.01351231225892084,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717586,
						"acc_stderr,none": 0.014095022868717586,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881445,
						"acc_stderr,none": 0.013588548437881445,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613592,
						"acc_stderr,none": 0.013294199326613592,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689504,
						"acc_stderr,none": 0.01382541652689504,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.017808806510137845,
						"acc_stderr,none": 0.017808806510137845,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259729,
						"acc_stderr,none": 0.013929286594259729,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809946,
						"acc_stderr,none": 0.013963164754809946,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.2633333333333333,
						"acc_norm,none": 0.2633333333333333,
						"acc_norm_stderr,none": 0.025471401031969213,
						"acc_stderr,none": 0.025471401031969213,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.01337497251922009,
						"acc_stderr,none": 0.01337497251922009,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895043,
						"acc_stderr,none": 0.013825416526895043,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.01343445140243869,
						"acc_stderr,none": 0.01343445140243869,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809946,
						"acc_stderr,none": 0.013963164754809946,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377933,
						"acc_stderr,none": 0.014370995982377933,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5608419206314406,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.0004967615230460869,
						"acc_stderr,none": 0.05413823602594502,
						"alias": "kobest",
						"f1,none": 0.5168425089886489,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6210826210826211,
						"acc_stderr,none": 0.012951441710828772,
						"alias": " - kobest_boolq",
						"f1,none": 0.6111856077957774,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154622,
						"alias": " - kobest_copa",
						"f1,none": 0.6301830744113748,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.422,
						"acc_norm,none": 0.546,
						"acc_norm_stderr,none": 0.02228814759117695,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4183444300121193,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.025004412942296043,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4478785477317048,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.49682539682539684,
						"acc_stderr,none": 0.014091213185340047,
						"alias": " - kobest_wic",
						"f1,none": 0.38258017244844017,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7163788084610906,
						"acc_stderr,none": 0.021918831653687878,
						"alias": "lambada",
						"perplexity,none": 3.6144469777749686,
						"perplexity_stderr,none": 0.2463954915414085
					},
					"lambada_cloze": {
						"acc,none": 0.0361925092179313,
						"acc_stderr,none": 0.00316071616136545,
						"alias": "lambada_cloze",
						"perplexity,none": 521.6564771337919,
						"perplexity_stderr,none": 93.6747242720253
					},
					"lambada_multilingual": {
						"acc,none": 0.5439937900252281,
						"acc_stderr,none": 0.08772152576468445,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.904988395755094,
						"perplexity_stderr,none": 8.280704599420616
					},
					"lambada_openai": {
						"acc,none": 0.7593634775858723,
						"acc_stderr,none": 0.005955492703491451,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1430043279833377,
						"perplexity_stderr,none": 0.06078958209458802
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03978265088298079,
						"acc_stderr,none": 0.0027229753280860617,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 338.0551414821208,
						"perplexity_stderr,none": 11.581803933669871
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4302348146710654,
						"acc_stderr,none": 0.006897835015074963,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.67571265394163,
						"perplexity_stderr,none": 1.9481192457595293
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7581991073161265,
						"acc_stderr,none": 0.005965305048434235,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1448129106483345,
						"perplexity_stderr,none": 0.06080104854020486
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4574034542984669,
						"acc_stderr,none": 0.00694065256687139,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.477147661932488,
						"perplexity_stderr,none": 1.4113304031830556
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5532699398408694,
						"acc_stderr,none": 0.0069263303079770315,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.42822336206449,
						"perplexity_stderr,none": 0.8068021436774651
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5208616339996118,
						"acc_stderr,none": 0.006959911720851461,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.79904539018854,
						"perplexity_stderr,none": 1.1679185444612057
					},
					"lambada_standard": {
						"acc,none": 0.6743644478944304,
						"acc_stderr,none": 0.006528678957835455,
						"alias": " - lambada_standard",
						"perplexity,none": 4.084807272492733,
						"perplexity_stderr,none": 0.0841608720399173
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03260236755288182,
						"acc_stderr,none": 0.0024742247822429524,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 705.2578127854631,
						"perplexity_stderr,none": 23.653250250235494
					},
					"logiqa": {
						"acc,none": 0.2411674347158218,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.0175432090758252,
						"acc_stderr,none": 0.016779369344911064,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.26017811704834604,
						"acc_norm,none": 0.29961832061068705,
						"acc_norm_stderr,none": 0.011557488735539875,
						"acc_stderr,none": 0.011069063455907857,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.26532663316582916,
						"acc_norm_stderr,none": 0.008082359462649727,
						"acc_stderr,none": 0.008029434758777935,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5078373226011438,
						"acc_stderr,none": 0.005145263811106717,
						"alias": "mc_taco",
						"f1,none": 0.5511445957693423,
						"f1_stderr,none": 0.005907335427711097
					},
					"medmcqa": {
						"acc,none": 0.35739899593593116,
						"acc_norm,none": 0.35739899593593116,
						"acc_norm_stderr,none": 0.007410633744908309,
						"acc_stderr,none": 0.007410633744908309,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3739198743126473,
						"acc_norm,none": 0.3739198743126473,
						"acc_norm_stderr,none": 0.01356627691828067,
						"acc_stderr,none": 0.01356627691828067,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09027862756450644,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4074074074074074,
						"acc_stderr,none": 0.04244633238353229,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749193,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411019,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03015113445777629,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4097222222222222,
						"acc_stderr,none": 0.04112490974670787,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.03583901754736411,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.042801058373643966,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3617021276595745,
						"acc_stderr,none": 0.03141082197596239,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.45517241379310347,
						"acc_stderr,none": 0.04149886942192117,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.023919984164047742,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.44516129032258067,
						"acc_stderr,none": 0.02827241018621491,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3399014778325123,
						"acc_stderr,none": 0.033327690684107895,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5636363636363636,
						"acc_stderr,none": 0.03872592983524754,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.46464646464646464,
						"acc_stderr,none": 0.03553436368828063,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5595854922279793,
						"acc_stderr,none": 0.03582724530036095,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.37435897435897436,
						"acc_stderr,none": 0.024537591572830506,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.31851851851851853,
						"acc_stderr,none": 0.02840653309060846,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36134453781512604,
						"acc_stderr,none": 0.031204691225150016,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.03511807571804723,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5284403669724771,
						"acc_stderr,none": 0.021402615697348044,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2175925925925926,
						"acc_stderr,none": 0.028139689444859676,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5196078431372549,
						"acc_stderr,none": 0.03506612560524866,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5864978902953587,
						"acc_stderr,none": 0.03205649904851859,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.42152466367713004,
						"acc_stderr,none": 0.03314190222110657,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5038167938931297,
						"acc_stderr,none": 0.043851623256015534,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.39064824654622743,
						"acc_stderr,none": 0.096048292049521,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4628099173553719,
						"acc_stderr,none": 0.04551711196104218,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5092592592592593,
						"acc_stderr,none": 0.04832853553437056,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3987730061349693,
						"acc_stderr,none": 0.03847021420456023,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291518,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5436893203883495,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.03193705726200293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5964240102171137,
						"acc_stderr,none": 0.017544332237926424,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.02653818910470548,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23128491620111732,
						"acc_stderr,none": 0.014102223623152577,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4673202614379085,
						"acc_stderr,none": 0.028568699752225868,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.46057289990344386,
						"acc_stderr,none": 0.08539426945163622,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5337620578778135,
						"acc_stderr,none": 0.02833327710956278,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.48148148148148145,
						"acc_stderr,none": 0.027801656212323667,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3191489361702128,
						"acc_stderr,none": 0.0278079901413202,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3272490221642764,
						"acc_stderr,none": 0.011983819806464747,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.029520095697687765,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4395424836601307,
						"acc_stderr,none": 0.020079420408087915,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.40816326530612246,
						"acc_stderr,none": 0.03146465712827424,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46376340591485216,
						"acc_stderr,none": 0.07434625886544384,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6467661691542289,
						"acc_stderr,none": 0.03379790611796777,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34792261338407865,
						"acc_stderr,none": 0.07043041013199013,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6374269005847953,
						"acc_stderr,none": 0.0368713061556206,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7943963321446765,
						"acc_stderr,none": 0.004079538323702946,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7954637917005696,
						"acc_stderr,none": 0.004068145340250349,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.696078431372549,
						"acc_stderr,none": 0.022798834443163555,
						"alias": "mrpc",
						"f1,none": 0.8176470588235294,
						"f1_stderr,none": 0.01609184235033859
					},
					"multimedqa": {
						"acc,none": 0.3873669268985096,
						"acc_norm,none": 0.36050234133502,
						"acc_norm_stderr,none": 0.00011154735371387975,
						"acc_stderr,none": 0.08174974322194567,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5662128712871287,
						"acc_stderr,none": 0.007118552456859644,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7102144484998948,
						"mrr_stderr,none": 0.010334435914923232,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.41196388261851014,
						"r@2_stderr,none": 0.016544739619609426
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6578254342819175,
						"mrr_stderr,none": 0.010454987139158683,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.016757741478801033
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.022080014812228137,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.011110230358066703,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3905,
						"acc_stderr,none": 0.010911663814634657,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.010992197878818591,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.011137752231145227,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5425,
						"acc_stderr,none": 0.011142663706548619,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.011155703691943108,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.011157250652425772,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48542857142857143,
						"acc_stderr,none": 0.0533488017756697,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7818280739934712,
						"acc_norm,none": 0.7889009793253536,
						"acc_norm_stderr,none": 0.009521377378734153,
						"acc_stderr,none": 0.009636081958374381,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25074722459436377,
						"acc_norm,none": 0.288535439795047,
						"acc_norm_stderr,none": 0.0033101629160730713,
						"acc_stderr,none": 0.0031666888930939044,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345407,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7466147264761285,
						"acc_norm,none": 0.6398228883396612,
						"acc_norm_stderr,none": 0.00931138936130364,
						"acc_stderr,none": 0.15323417083329235,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339162169534232,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517715883768304,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1430043279833377,
						"perplexity_stderr,none": 0.06078958209458802,
						"word_perplexity,none": 10.481953286429421,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.4521276595744681,
						"acc_norm_stderr,none": 0.05921656033438769,
						"acc_stderr,none": 0.044889894198871454,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.40492957746478875,
						"acc_norm,none": 0.41901408450704225,
						"acc_norm_stderr,none": 0.029329448381681836,
						"acc_stderr,none": 0.029179692752203355,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7563937670046995,
						"acc_stderr,none": 0.0021348711538019946,
						"alias": "qqp",
						"f1,none": 0.6872439744688958,
						"f1_stderr,none": 0.0029835552691493644
					},
					"race": {
						"acc,none": 0.3492822966507177,
						"acc_stderr,none": 0.014754834713104492,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2622,
						"em_stderr,none": 0.004398528243068212,
						"f1,none": 0.27193523834049704,
						"f1_stderr,none": 0.004411570223398709
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.944,
						"acc_norm_stderr,none": 0.007274401481697071,
						"acc_stderr,none": 0.006558812241406115,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170357,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.01120598290257748,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5864740577826651,
						"acc_norm,none": 0.77221833449965,
						"acc_norm_stderr,none": 0.002965242106965758,
						"acc_stderr,none": 0.003481821519873869,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6748194735616119,
						"acc_stderr,none": 0.06325286648771192,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6227964743589743,
						"acc_stderr,none": 0.004850990328859875,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8430120604033647,
						"acc_stderr,none": 0.0036625155995172196,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5630392156862745,
						"acc_stderr,none": 0.00491147296156521,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34066057096857333,
						"acc_stderr,none": 0.0014986143632334042,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.043849772068066,
						"bleu_diff_stderr,none": 0.8765634826745657,
						"bleu_max,none": 27.163948579181803,
						"bleu_max_stderr,none": 0.7931898909880212,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -7.609244770198295,
						"rouge1_diff_stderr,none": 0.9643413459782677,
						"rouge1_max,none": 52.933340683269385,
						"rouge1_max_stderr,none": 0.8471478288298345,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024643,
						"rouge2_diff,none": -9.038968149823395,
						"rouge2_diff_stderr,none": 1.1549083395656679,
						"rouge2_max,none": 37.32845620528283,
						"rouge2_max_stderr,none": 0.9968245606197573,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768545,
						"rougeL_diff,none": -7.8440069575113505,
						"rougeL_diff_stderr,none": 0.9886490732442875,
						"rougeL_max,none": 49.95832859521396,
						"rougeL_max_stderr,none": 0.8649734725042382
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.043849772068066,
						"bleu_diff_stderr,none": 0.8765634826745657,
						"bleu_max,none": 27.163948579181803,
						"bleu_max_stderr,none": 0.7931898909880212,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487361002,
						"rouge1_diff,none": -7.609244770198295,
						"rouge1_diff_stderr,none": 0.9643413459782677,
						"rouge1_max,none": 52.933340683269385,
						"rouge1_max_stderr,none": 0.8471478288298345,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.015680929364024643,
						"rouge2_diff,none": -9.038968149823395,
						"rouge2_diff_stderr,none": 1.1549083395656679,
						"rouge2_max,none": 37.32845620528283,
						"rouge2_max_stderr,none": 0.9968245606197573,
						"rougeL_acc,none": 0.29865361077111385,
						"rougeL_acc_stderr,none": 0.016021570613768545,
						"rougeL_diff,none": -7.8440069575113505,
						"rougeL_diff_stderr,none": 0.9886490732442875,
						"rougeL_max,none": 49.95832859521396,
						"rougeL_max_stderr,none": 0.8649734725042382
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2692778457772338,
						"acc_stderr,none": 0.015528566637087274,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.41204329615991286,
						"acc_stderr,none": 0.01438957662219241,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.12253937007874016,
						"exact_match_stderr,none": 0.007276066837654692
					},
					"wic": {
						"acc,none": 0.5047021943573667,
						"acc_stderr,none": 0.01980984521925977,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6339933528504801,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518545584363792,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.484950606251948,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7253354380426204,
						"acc_stderr,none": 0.012544516005117197,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.048679937479186836,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070851,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6201818181818182,
						"acc_stderr,none": 0.06451327389540393,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.021750820591250834,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.02051442622562804,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290785,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.021380042385946044,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290795,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.02062956999834541,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43180722891566264,
						"acc_stderr,none": 0.046774996360704875,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.00948525020851688,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.009999776793187627,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774337,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906763,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5269076305220883,
						"acc_stderr,none": 0.010007549970702514,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483494,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4779116465863454,
						"acc_stderr,none": 0.010012288645591784,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840175,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071305,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41044176706827307,
						"acc_stderr,none": 0.00985999467258512,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115701,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391928,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41244979919678715,
						"acc_stderr,none": 0.009867237678555586,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40160642570281124,
						"acc_stderr,none": 0.009826103601507121,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806038,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6312496239696769,
						"acc_stderr,none": 0.055551939219346536,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823765,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7888815354070152,
						"acc_stderr,none": 0.010502205965083534,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7180675049636003,
						"acc_stderr,none": 0.011578884735064788,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5764394440767704,
						"acc_stderr,none": 0.012715871382881435,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.01263288721875138,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6677696889477167,
						"acc_stderr,none": 0.012121168923544593,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5519523494374586,
						"acc_stderr,none": 0.012797478885304744,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6836532097948379,
						"acc_stderr,none": 0.011967713146973749,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5539377895433488,
						"acc_stderr,none": 0.012792037953589649,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5797485109199206,
						"acc_stderr,none": 0.012702405649149102,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6419589675711449,
						"acc_stderr,none": 0.012337624883487573,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8168127669139132,
						"acc_stderr,none": 0.040190973040705194,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8821505376344086,
						"acc_stderr,none": 0.006688320753621775,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7361835245046924,
						"acc_stderr,none": 0.014238401966368419,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7718253968253969,
						"acc_stderr,none": 0.018711525330668,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/Eagle-225-1FT"
	},
	"./rwkv-x-dev/EagleX-1_7T_pth": {
		"config": {
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.3490955704315625,
						"acc_stderr,none": 0.06442808867654336,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3321997874601488,
						"acc_stderr,none": 0.06579588075896782,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.39330543933054396,
						"acc_stderr,none": 0.05451192436228265,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36626584335391615,
						"acc_stderr,none": 0.05622286564556315,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.31398667935299707,
						"acc_stderr,none": 0.06320147713560288,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.43430034129692835,
						"acc_norm,none": 0.4709897610921502,
						"acc_norm_stderr,none": 0.014586776355294326,
						"acc_stderr,none": 0.014484703048857355,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.12585291887793784,
						"exact_match_stderr,get-answer": 0.009136212598406305
					},
					"hellaswag": {
						"acc,none": 0.5422226648078072,
						"acc_norm,none": 0.7303326030671181,
						"acc_norm_stderr,none": 0.004428800140739965,
						"acc_stderr,none": 0.0049719584809204895,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.3490955704315625,
						"acc_stderr,none": 0.06442808867654336,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.03944624162501116,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749194,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.35471698113207545,
						"acc_stderr,none": 0.02944517532819959,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3680555555555556,
						"acc_stderr,none": 0.04032999053960718,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542129,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3468208092485549,
						"acc_stderr,none": 0.036291466701596636,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.19607843137254902,
						"acc_stderr,none": 0.03950581861179964,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610344,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022057,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3793103448275862,
						"acc_stderr,none": 0.04043461861916747,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2830687830687831,
						"acc_stderr,none": 0.023201392938194978,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0404061017820884,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411021,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3935483870967742,
						"acc_stderr,none": 0.027791878753132274,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.03178529710642749,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.509090909090909,
						"acc_stderr,none": 0.03903698647748441,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.30303030303030304,
						"acc_stderr,none": 0.03274287914026867,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.43523316062176165,
						"acc_stderr,none": 0.03578038165008586,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3487179487179487,
						"acc_stderr,none": 0.02416278028401772,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.026067159222275794,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.028942004040998164,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3908256880733945,
						"acc_stderr,none": 0.020920058346111062,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.30092592592592593,
						"acc_stderr,none": 0.031280390843298804,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.36764705882352944,
						"acc_stderr,none": 0.03384132045674119,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.46835443037974683,
						"acc_stderr,none": 0.03248197400511075,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3991031390134529,
						"acc_stderr,none": 0.032867453125679603,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.42748091603053434,
						"acc_stderr,none": 0.04338920305792401,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3321997874601488,
						"acc_stderr,none": 0.06579588075896782,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4297520661157025,
						"acc_stderr,none": 0.04519082021319772,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3611111111111111,
						"acc_stderr,none": 0.04643454608906275,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4171779141104294,
						"acc_stderr,none": 0.038741028598180814,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458933,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4829059829059829,
						"acc_stderr,none": 0.032736940493481824,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4610472541507024,
						"acc_stderr,none": 0.017825621793239016,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.36416184971098264,
						"acc_stderr,none": 0.025906632631016124,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24581005586592178,
						"acc_stderr,none": 0.014400296429225615,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.027914055510468008,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.39330543933054396,
						"acc_stderr,none": 0.05451192436228265,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3536977491961415,
						"acc_stderr,none": 0.027155208103200868,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36419753086419754,
						"acc_stderr,none": 0.026774929899722327,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.30851063829787234,
						"acc_stderr,none": 0.02755336616510137,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.29465449804432853,
						"acc_stderr,none": 0.011643576764069545,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.34191176470588236,
						"acc_stderr,none": 0.02881472242225418,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.32516339869281047,
						"acc_stderr,none": 0.01895088677080631,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.046534298079135075,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4326530612244898,
						"acc_stderr,none": 0.031717528240626645,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36626584335391615,
						"acc_stderr,none": 0.05622286564556315,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4626865671641791,
						"acc_stderr,none": 0.035256751674679745,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.31398667935299707,
						"acc_stderr,none": 0.06320147713560288,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.036293353299478595,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.42105263157894735,
						"acc_stderr,none": 0.037867207062342145,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40180645396255277,
						"acc_stderr,none": 0.014115349913646186,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.7324388318863457,
						"acc_stderr,none": 0.012441718456893012,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/EagleX-1_7T_pth"
	},
	"./rwkv-x-dev/EagleX_1-7T_Chat_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6133032694475761,
						"acc_norm,none": 0.5882187147688839,
						"acc_norm_stderr,none": 0.08084121980695365,
						"acc_stderr,none": 0.10127537393964518,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.460625,
						"acc_stderr,none": 0.043708034590508565,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.01655,
						"acc_stderr,none": 0.013171501581899102,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8138059701492537,
						"acc_stderr,none": 0.15923992237046108,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27860326894502235,
						"acc_norm,none": 0.27860326894502235,
						"acc_norm_stderr,none": 0.12300582557999447,
						"acc_stderr,none": 0.12300582557999447,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.30806423761008467,
						"acc_norm,none": 0.30806423761008467,
						"acc_norm_stderr,none": 0.05592566596969818,
						"acc_stderr,none": 0.05592566596969818,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4926393858079905,
						"likelihood_diff_stderr,none": 0.5042965914140495,
						"pct_stereotype,none": 0.6156827668455574,
						"pct_stereotype_stderr,none": 0.07204227282589995
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.7331642673202877,
						"acc_stderr,none": 0.05455912283558178,
						"alias": "glue",
						"f1,none": 0.7183980579574881,
						"f1_stderr,none": 0.000152408520705082,
						"mcc,none": 0.06558874629318973,
						"mcc_stderr,none": 0.0008455417152003534
					},
					"kmmlu": {
						"acc,none": 0.16361536240254115,
						"acc_norm,none": 0.16361536240254115,
						"acc_norm_stderr,none": 0.0429225325507286,
						"acc_stderr,none": 0.0429225325507286,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5340934005700504,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.000494260521042081,
						"acc_stderr,none": 0.04325220377388345,
						"alias": "kobest",
						"f1,none": 0.45285034899266385,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7182223947215215,
						"acc_stderr,none": 0.017368766029487847,
						"alias": "lambada",
						"perplexity,none": 3.6002256917311377,
						"perplexity_stderr,none": 0.17447573380469258
					},
					"lambada_cloze": {
						"acc,none": 0.04822433533863769,
						"acc_stderr,none": 0.0077288820544181595,
						"alias": "lambada_cloze",
						"perplexity,none": 583.9834172920337,
						"perplexity_stderr,none": 114.45949044873223
					},
					"lambada_multilingual": {
						"acc,none": 0.5374345041723269,
						"acc_stderr,none": 0.08717805057976721,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.086143578233564,
						"perplexity_stderr,none": 8.228344482254014
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09611339133682227,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.38788522848034007,
						"acc_stderr,none": 0.10850461127764824,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.46572256195687156,
						"acc_stderr,none": 0.08647027508358718,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4631134221644459,
						"acc_stderr,none": 0.07534637607080809,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34760545512210606,
						"acc_stderr,none": 0.07312862053235213,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.39048970901348473,
						"acc_norm,none": 0.3587351329877091,
						"acc_norm_stderr,none": 0.0001103090951982101,
						"acc_stderr,none": 0.08199898787604479,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4844285714285715,
						"acc_stderr,none": 0.05429494597872282,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7380420589998629,
						"acc_norm,none": 0.592985728176822,
						"acc_norm_stderr,none": 0.00840798574418502,
						"acc_stderr,none": 0.15165565288003244,
						"alias": "pythia",
						"bits_per_byte,none": 0.6340000603197895,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551861773433815,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2825187815533297,
						"perplexity_stderr,none": 0.06331468171650306,
						"word_perplexity,none": 10.4852112832597,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.059833619700894396,
						"acc_stderr,none": 0.039172632951299885,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6697946823732988,
						"acc_stderr,none": 0.08612731890162503,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34668011043267505,
						"acc_stderr,none": 0.0017250397595881194,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.36964504283965727,
						"bleu_acc_stderr,none": 0.016898180706973895,
						"bleu_diff,none": -4.614693101299436,
						"bleu_diff_stderr,none": 0.8504537836168142,
						"bleu_max,none": 25.862220055641405,
						"bleu_max_stderr,none": 0.7929138750930529,
						"rouge1_acc,none": 0.3317013463892289,
						"rouge1_acc_stderr,none": 0.016482148810241473,
						"rouge1_diff,none": -6.002948469529099,
						"rouge1_diff_stderr,none": 0.9675382290582,
						"rouge1_max,none": 51.14851831893734,
						"rouge1_max_stderr,none": 0.8589736557732265,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.385388894027129,
						"rouge2_diff_stderr,none": 1.1552207099752347,
						"rouge2_max,none": 35.25623300551201,
						"rouge2_max_stderr,none": 1.0158567172510493,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -6.25156253036408,
						"rougeL_diff_stderr,none": 0.9816664026630537,
						"rougeL_max,none": 48.200946760002516,
						"rougeL_max_stderr,none": 0.8811265093627599
					},
					"xcopa": {
						"acc,none": 0.6216363636363635,
						"acc_stderr,none": 0.06984389251251383,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43809906291834005,
						"acc_stderr,none": 0.04978173817572529,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6296853378256423,
						"acc_stderr,none": 0.061607409074009495,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8102944481906046,
						"acc_stderr,none": 0.03441658645048334,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6133032694475761,
						"acc_norm,none": 0.5882187147688839,
						"acc_norm_stderr,none": 0.08084121980695365,
						"acc_stderr,none": 0.10127537393964518,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.460625,
						"acc_stderr,none": 0.043708034590508565,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.015743152379585533,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.424,
						"acc_stderr,none": 0.015635487471405193,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4175,
						"acc_stderr,none": 0.014241856751717625,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3993174061433447,
						"acc_norm,none": 0.4180887372013652,
						"acc_norm_stderr,none": 0.014413988396996077,
						"acc_stderr,none": 0.014312094557946693,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7188552188552189,
						"acc_norm,none": 0.6721380471380471,
						"acc_norm_stderr,none": 0.009632587076170014,
						"acc_stderr,none": 0.009224735470286998,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.01655,
						"acc_stderr,none": 0.013171501581899102,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0215,
						"acc_stderr,none": 0.003244092641792793,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0335,
						"acc_stderr,none": 0.004024546370306099,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0485,
						"acc_stderr,none": 0.004804728682127104,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.014,
						"acc_stderr,none": 0.002627822811066777,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275425,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.005,
						"acc_stderr,none": 0.0015775754727385047,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430695027,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.0019924821184884632,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000102,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0335,
						"acc_stderr,none": 0.004024546370306094,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8138059701492537,
						"acc_stderr,none": 0.15923992237046108,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234396,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.00223158687484488,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942317,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499334,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171756,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103298,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998106,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792944,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666666,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406095,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315111,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323494,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910633,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662746,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515443,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377935,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377939,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487914,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689071,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911076,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151094,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.01220758063766216,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498323,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.013663187134877653,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662763,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524308,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118576,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248113,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493939,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.00882342636694232,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.01534116525402664,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.01564978964446222,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01578686875935903,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796584,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103312,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.01312750285969624,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333459,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910622,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662768,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042092,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.01474640486547348,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970536,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855745,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767624,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264912,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.01581015372983343,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108654,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797587,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235239,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747391,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.01539519444541081,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727176,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.0078552979386976,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.00936368937324811,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698455,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734962,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.372,
						"acc_stderr,none": 0.015292149942040577,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.331,
						"acc_stderr,none": 0.014888272588203931,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6428134556574924,
						"acc_stderr,none": 0.008380743796951405,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.8244289585753001,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27860326894502235,
						"acc_norm,none": 0.27860326894502235,
						"acc_norm_stderr,none": 0.12300582557999447,
						"acc_stderr,none": 0.12300582557999447,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828226,
						"acc_stderr,none": 0.05589005688828226,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.07593355178041425,
						"acc_stderr,none": 0.07593355178041425,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.30806423761008467,
						"acc_norm,none": 0.30806423761008467,
						"acc_norm_stderr,none": 0.05592566596969818,
						"acc_stderr,none": 0.05592566596969818,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3254437869822485,
						"acc_norm,none": 0.3254437869822485,
						"acc_norm_stderr,none": 0.03614867847292204,
						"acc_stderr,none": 0.03614867847292204,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.425,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.0392039498715957,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3282442748091603,
						"acc_norm,none": 0.3282442748091603,
						"acc_norm_stderr,none": 0.041184385658062976,
						"acc_stderr,none": 0.041184385658062976,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3014705882352941,
						"acc_norm,none": 0.3014705882352941,
						"acc_norm_stderr,none": 0.03949552929827394,
						"acc_stderr,none": 0.03949552929827394,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.32507739938080493,
						"acc_norm,none": 0.32507739938080493,
						"acc_norm_stderr,none": 0.02610312109754256,
						"acc_stderr,none": 0.02610312109754256,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.032702871814820816,
						"acc_stderr,none": 0.032702871814820816,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3854748603351955,
						"acc_norm,none": 0.3854748603351955,
						"acc_norm_stderr,none": 0.03648025419294365,
						"acc_stderr,none": 0.03648025419294365,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.36792452830188677,
						"acc_norm,none": 0.36792452830188677,
						"acc_norm_stderr,none": 0.047061871107614554,
						"acc_stderr,none": 0.047061871107614554,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.044298119496145844,
						"acc_stderr,none": 0.044298119496145844,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153370994,
						"acc_stderr,none": 0.040842473153370994,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.027065504564389525,
						"acc_stderr,none": 0.027065504564389525,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460388,
						"acc_stderr,none": 0.03256685484460388,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.03446296217088426,
						"acc_stderr,none": 0.03446296217088426,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2925170068027211,
						"acc_norm,none": 0.2925170068027211,
						"acc_norm_stderr,none": 0.03764931984085173,
						"acc_stderr,none": 0.03764931984085173,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.037622409350890895,
						"acc_stderr,none": 0.037622409350890895,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3522012578616352,
						"acc_norm,none": 0.3522012578616352,
						"acc_norm_stderr,none": 0.03800029412130649,
						"acc_stderr,none": 0.03800029412130649,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.34355828220858897,
						"acc_norm,none": 0.34355828220858897,
						"acc_norm_stderr,none": 0.03731133519673893,
						"acc_stderr,none": 0.03731133519673893,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713547,
						"acc_stderr,none": 0.03191178226713547,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4369747899159664,
						"acc_norm,none": 0.4369747899159664,
						"acc_norm_stderr,none": 0.032219436365661956,
						"acc_stderr,none": 0.032219436365661956,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949517,
						"acc_stderr,none": 0.029322764228949517,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.039725528847851375,
						"acc_stderr,none": 0.039725528847851375,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3087248322147651,
						"acc_norm,none": 0.3087248322147651,
						"acc_norm_stderr,none": 0.037973480272130815,
						"acc_stderr,none": 0.037973480272130815,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3050847457627119,
						"acc_norm,none": 0.3050847457627119,
						"acc_norm_stderr,none": 0.04256799926288003,
						"acc_stderr,none": 0.04256799926288003,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3546511627906977,
						"acc_norm,none": 0.3546511627906977,
						"acc_norm_stderr,none": 0.036584734259385424,
						"acc_stderr,none": 0.036584734259385424,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2798053527980535,
						"acc_norm,none": 0.2798053527980535,
						"acc_norm_stderr,none": 0.02216976172592782,
						"acc_stderr,none": 0.02216976172592782,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.43457943925233644,
						"acc_norm,none": 0.43457943925233644,
						"acc_norm_stderr,none": 0.03396491908994051,
						"acc_stderr,none": 0.03396491908994051,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3170731707317073,
						"acc_norm,none": 0.3170731707317073,
						"acc_norm_stderr,none": 0.04212955964853051,
						"acc_stderr,none": 0.04212955964853051,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.32857142857142857,
						"acc_norm,none": 0.32857142857142857,
						"acc_norm_stderr,none": 0.03248939796876841,
						"acc_stderr,none": 0.03248939796876841,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.033949216164478796,
						"acc_stderr,none": 0.033949216164478796,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180183,
						"acc_stderr,none": 0.04244626443180183,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.037528339580033376,
						"acc_stderr,none": 0.037528339580033376,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752742,
						"acc_stderr,none": 0.03424737867752742,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3577586206896552,
						"acc_norm,none": 0.3577586206896552,
						"acc_norm_stderr,none": 0.0315382945960225,
						"acc_stderr,none": 0.0315382945960225,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3515151515151515,
						"acc_norm,none": 0.3515151515151515,
						"acc_norm_stderr,none": 0.0372820699868265,
						"acc_stderr,none": 0.0372820699868265,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.035795265164562245,
						"acc_stderr,none": 0.035795265164562245,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3167701863354037,
						"acc_norm,none": 0.3167701863354037,
						"acc_norm_stderr,none": 0.03677863131157453,
						"acc_stderr,none": 0.03677863131157453,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.06558874629318973,
						"mcc_stderr,none": 0.029078199999318276
					},
					"copa": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4926393858079905,
						"likelihood_diff_stderr,none": 0.5042965914140495,
						"pct_stereotype,none": 0.6156827668455574,
						"pct_stereotype_stderr,none": 0.07204227282589995
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.783989266547406,
						"likelihood_diff_stderr,none": 0.09063036887546638,
						"pct_stereotype,none": 0.6547406082289803,
						"pct_stereotype_stderr,none": 0.01161369408556993
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.001373626373627,
						"likelihood_diff_stderr,none": 0.38554315321511096,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.534090909090909,
						"likelihood_diff_stderr,none": 1.7004420305242849,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.936538461538461,
						"likelihood_diff_stderr,none": 0.6077803535900607,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.05138611236879767
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.928515625,
						"likelihood_diff_stderr,none": 0.20377178302317678,
						"pct_stereotype,none": 0.65625,
						"pct_stereotype_stderr,none": 0.02659259198114885
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.8119212962962963,
						"likelihood_diff_stderr,none": 0.250195065056097,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.057291666666667,
						"likelihood_diff_stderr,none": 0.32824141872432555,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5866141732283463,
						"likelihood_diff_stderr,none": 0.15549549627684767,
						"pct_stereotype,none": 0.5610236220472441,
						"pct_stereotype_stderr,none": 0.022039775660119297
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.635135135135135,
						"likelihood_diff_stderr,none": 0.34812640240791637,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.042776625248814384
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.012096774193548,
						"likelihood_diff_stderr,none": 0.4421774789337771,
						"pct_stereotype,none": 0.9139784946236559,
						"pct_stereotype_stderr,none": 0.029233283218071043
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1743421052631575,
						"likelihood_diff_stderr,none": 0.24351391445248713,
						"pct_stereotype,none": 0.6947368421052632,
						"pct_stereotype_stderr,none": 0.03349781342677419
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.2031902206320813,
						"likelihood_diff_stderr,none": 0.07344759476391143,
						"pct_stereotype,none": 0.5784138342277877,
						"pct_stereotype_stderr,none": 0.01206217164717359
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.325,
						"likelihood_diff_stderr,none": 0.29906872725257383,
						"pct_stereotype,none": 0.5888888888888889,
						"pct_stereotype_stderr,none": 0.05215564061107554
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.1538461538461537,
						"likelihood_diff_stderr,none": 1.0200468431058338,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.840909090909091,
						"likelihood_diff_stderr,none": 0.4218202164166027,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.05524032911365452
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.777258566978193,
						"likelihood_diff_stderr,none": 0.12637046578089253,
						"pct_stereotype,none": 0.5763239875389408,
						"pct_stereotype_stderr,none": 0.027623283256959768
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.2381422924901186,
						"likelihood_diff_stderr,none": 0.1940532971779678,
						"pct_stereotype,none": 0.43478260869565216,
						"pct_stereotype_stderr,none": 0.031227956788816427
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.513888888888889,
						"likelihood_diff_stderr,none": 0.4336020995495588,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.9046195652173914,
						"likelihood_diff_stderr,none": 0.14031130068141434,
						"pct_stereotype,none": 0.4934782608695652,
						"pct_stereotype_stderr,none": 0.023336016041798573
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.135869565217391,
						"likelihood_diff_stderr,none": 0.28955964397253997,
						"pct_stereotype,none": 0.6608695652173913,
						"pct_stereotype_stderr,none": 0.04433930011819816
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.70467032967033,
						"likelihood_diff_stderr,none": 0.3119914450687381,
						"pct_stereotype,none": 0.8351648351648352,
						"pct_stereotype_stderr,none": 0.039110176747367435
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5994897959183674,
						"likelihood_diff_stderr,none": 0.23348603803568882,
						"pct_stereotype,none": 0.6938775510204082,
						"pct_stereotype_stderr,none": 0.033004389390311806
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.7331642673202877,
						"acc_stderr,none": 0.05455912283558178,
						"alias": "glue",
						"f1,none": 0.7183980579574881,
						"f1_stderr,none": 0.000152408520705082,
						"mcc,none": 0.06558874629318973,
						"mcc_stderr,none": 0.0008455417152003534
					},
					"hellaswag": {
						"acc,none": 0.5428201553475404,
						"acc_norm,none": 0.7309300936068512,
						"acc_norm_stderr,none": 0.004425700307466413,
						"acc_stderr,none": 0.004971449552787176,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.16361536240254115,
						"acc_norm,none": 0.16361536240254115,
						"acc_norm_stderr,none": 0.0429225325507286,
						"acc_stderr,none": 0.0429225325507286,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.012021627157731975,
						"acc_stderr,none": 0.012021627157731975,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.01084535023047299,
						"acc_stderr,none": 0.01084535023047299,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499325,
						"acc_stderr,none": 0.012864077288499325,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.016494801123591727,
						"acc_stderr,none": 0.016494801123591727,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024957,
						"acc_stderr,none": 0.009698921026024957,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264602,
						"acc_stderr,none": 0.011912216456264602,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024973,
						"acc_stderr,none": 0.009698921026024973,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.159,
						"acc_norm,none": 0.159,
						"acc_norm_stderr,none": 0.011569479368271315,
						"acc_stderr,none": 0.011569479368271315,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.03939825345266469,
						"acc_stderr,none": 0.03939825345266469,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816505,
						"acc_stderr,none": 0.04229525846816505,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.092,
						"acc_norm,none": 0.092,
						"acc_norm_stderr,none": 0.009144376393151112,
						"acc_stderr,none": 0.009144376393151112,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.13,
						"acc_norm,none": 0.13,
						"acc_norm_stderr,none": 0.010640169792499337,
						"acc_stderr,none": 0.010640169792499337,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938022,
						"acc_stderr,none": 0.012931481864938022,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.082,
						"acc_norm,none": 0.082,
						"acc_norm_stderr,none": 0.00868051561552371,
						"acc_stderr,none": 0.00868051561552371,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.172,
						"acc_norm,none": 0.172,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.011939788882495321,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.147,
						"acc_norm,none": 0.147,
						"acc_norm_stderr,none": 0.011203415395160335,
						"acc_stderr,none": 0.011203415395160335,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542127,
						"acc_stderr,none": 0.04512608598542127,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847167,
						"acc_stderr,none": 0.009779910359847167,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.148,
						"acc_norm,none": 0.148,
						"acc_norm_stderr,none": 0.011234866364235258,
						"acc_stderr,none": 0.011234866364235258,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333315,
						"acc_stderr,none": 0.010878848714333315,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696228,
						"acc_stderr,none": 0.013127502859696228,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837954,
						"acc_stderr,none": 0.010674874844837954,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707387,
						"acc_stderr,none": 0.012559527926707387,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.015865408450741202,
						"acc_stderr,none": 0.015865408450741202,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.184,
						"acc_norm,none": 0.184,
						"acc_norm_stderr,none": 0.012259457340938557,
						"acc_stderr,none": 0.012259457340938557,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.174,
						"acc_norm,none": 0.174,
						"acc_norm_stderr,none": 0.011994493230973412,
						"acc_stderr,none": 0.011994493230973412,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.010463483381956722,
						"acc_stderr,none": 0.010463483381956722,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264618,
						"acc_stderr,none": 0.011912216456264618,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.024459979523511404,
						"acc_stderr,none": 0.024459979523511404,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145144,
						"acc_stderr,none": 0.013979965645145144,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847167,
						"acc_stderr,none": 0.009779910359847167,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.01171500069318131,
						"acc_stderr,none": 0.01171500069318131,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.02960162633044062,
						"acc_stderr,none": 0.02960162633044062,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.149,
						"acc_norm,none": 0.149,
						"acc_norm_stderr,none": 0.011266140684632175,
						"acc_stderr,none": 0.011266140684632175,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804483,
						"acc_stderr,none": 0.013273740700804483,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.02983202555549525,
						"acc_stderr,none": 0.02983202555549525,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.010978183844357794,
						"acc_stderr,none": 0.010978183844357794,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5340934005700504,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.000494260521042081,
						"acc_stderr,none": 0.04325220377388345,
						"alias": "kobest",
						"f1,none": 0.45285034899266385,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5484330484330484,
						"acc_stderr,none": 0.013285993885047974,
						"alias": " - kobest_boolq",
						"f1,none": 0.4475377603002949,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652697,
						"alias": " - kobest_copa",
						"f1,none": 0.6157515768732612,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.436,
						"acc_norm,none": 0.558,
						"acc_norm_stderr,none": 0.02223197069632112,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4311401471068777,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.025004412942296047,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4786012487802015,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4865079365079365,
						"acc_stderr,none": 0.014086365971849188,
						"alias": " - kobest_wic",
						"f1,none": 0.3299849842075286,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7182223947215215,
						"acc_stderr,none": 0.017368766029487847,
						"alias": "lambada",
						"perplexity,none": 3.6002256917311377,
						"perplexity_stderr,none": 0.17447573380469258
					},
					"lambada_cloze": {
						"acc,none": 0.04822433533863769,
						"acc_stderr,none": 0.0077288820544181595,
						"alias": "lambada_cloze",
						"perplexity,none": 583.9834172920337,
						"perplexity_stderr,none": 114.45949044873223
					},
					"lambada_multilingual": {
						"acc,none": 0.5374345041723269,
						"acc_stderr,none": 0.08717805057976721,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.086143578233564,
						"perplexity_stderr,none": 8.228344482254014
					},
					"lambada_openai": {
						"acc,none": 0.7512128856976519,
						"acc_stderr,none": 0.006022926018315462,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2825187815533297,
						"perplexity_stderr,none": 0.06331468171650306
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03396079953425189,
						"acc_stderr,none": 0.002523471480546155,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 359.68873392183383,
						"perplexity_stderr,none": 11.934943046674631
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42732388899670093,
						"acc_stderr,none": 0.00689199878844782,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.227300766052686,
						"perplexity_stderr,none": 1.881103938615858
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7506307005627789,
						"acc_stderr,none": 0.0060276319593311435,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.282095249299283,
						"perplexity_stderr,none": 0.0633232327338313
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4478944304288764,
						"acc_stderr,none": 0.006928049276239783,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.45811689214771,
						"perplexity_stderr,none": 1.4417836203708523
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5499708907432563,
						"acc_stderr,none": 0.006931101003281444,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.50764215228327,
						"perplexity_stderr,none": 0.7947525153557977
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5113526101300213,
						"acc_stderr,none": 0.0069641818508393285,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.955562831384857,
						"perplexity_stderr,none": 1.1483723497932705
					},
					"lambada_standard": {
						"acc,none": 0.685814088880264,
						"acc_stderr,none": 0.006467085866653897,
						"alias": " - lambada_standard",
						"perplexity,none": 3.9186512895204673,
						"perplexity_stderr,none": 0.07857503842857351
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.06248787114302348,
						"acc_stderr,none": 0.0033720840032029947,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 808.2781006622333,
						"perplexity_stderr,none": 30.051872705477063
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971925,
						"acc_stderr,none": 0.017162894755127066,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2741730279898219,
						"acc_norm,none": 0.2818066157760814,
						"acc_norm_stderr,none": 0.01135032245847965,
						"acc_stderr,none": 0.011254878812320587,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24355108877721943,
						"acc_norm,none": 0.26030150753768844,
						"acc_norm_stderr,none": 0.008032787884882497,
						"acc_stderr,none": 0.007857518810292747,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6367295064604956,
						"acc_stderr,none": 0.004949753268179166,
						"alias": "mc_taco",
						"f1,none": 0.5873435996150145,
						"f1_stderr,none": 0.006434704747321465
					},
					"medmcqa": {
						"acc,none": 0.3523786755916806,
						"acc_norm,none": 0.3523786755916806,
						"acc_norm_stderr,none": 0.007387089578379926,
						"acc_stderr,none": 0.007387089578379926,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3699921445404556,
						"acc_norm,none": 0.3699921445404556,
						"acc_norm_stderr,none": 0.013537101287089803,
						"acc_stderr,none": 0.013537101287089803,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4125480700754878,
						"acc_stderr,none": 0.09611339133682227,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.45925925925925926,
						"acc_stderr,none": 0.04304979692464243,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.42105263157894735,
						"acc_stderr,none": 0.040179012759817494,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.44528301886792454,
						"acc_stderr,none": 0.030588052974270658,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4027777777777778,
						"acc_stderr,none": 0.04101405519842425,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3815028901734104,
						"acc_stderr,none": 0.03703851193099521,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.04488482852329017,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3617021276595745,
						"acc_stderr,none": 0.031410821975962386,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.04303684033537318,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.41379310344827586,
						"acc_stderr,none": 0.041042692118062316,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.0236369759961018,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848877,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45806451612903226,
						"acc_stderr,none": 0.028343787250540625,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.03178529710642749,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5878787878787879,
						"acc_stderr,none": 0.03843566993588717,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4797979797979798,
						"acc_stderr,none": 0.03559443565563918,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5647668393782384,
						"acc_stderr,none": 0.03578038165008588,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.358974358974359,
						"acc_stderr,none": 0.02432173848460235,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.027840811495871934,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3739495798319328,
						"acc_stderr,none": 0.03142946637883708,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5596330275229358,
						"acc_stderr,none": 0.021284310623761547,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.0305467452649532,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5637254901960784,
						"acc_stderr,none": 0.03480693138457039,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6497890295358649,
						"acc_stderr,none": 0.03105239193758435,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4080717488789238,
						"acc_stderr,none": 0.03298574607842822,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5190839694656488,
						"acc_stderr,none": 0.04382094705550988,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.38788522848034007,
						"acc_stderr,none": 0.10850461127764824,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4793388429752066,
						"acc_stderr,none": 0.04560456086387235,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04833682445228318,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4233128834355828,
						"acc_stderr,none": 0.03881891213334383,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915185,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5533980582524272,
						"acc_stderr,none": 0.04922424153458933,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6623931623931624,
						"acc_stderr,none": 0.030980296992618558,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.561941251596424,
						"acc_stderr,none": 0.017742232238257237,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4046242774566474,
						"acc_stderr,none": 0.026424816594009852,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21787709497206703,
						"acc_stderr,none": 0.013806211780732984,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.477124183006536,
						"acc_stderr,none": 0.028599936776089782,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.46572256195687156,
						"acc_stderr,none": 0.08647027508358718,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5144694533762058,
						"acc_stderr,none": 0.02838619808417768,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.44135802469135804,
						"acc_stderr,none": 0.027628737155668773,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2978723404255319,
						"acc_stderr,none": 0.02728160834446941,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3272490221642764,
						"acc_stderr,none": 0.011983819806464742,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3897058823529412,
						"acc_stderr,none": 0.0296246635811597,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.02003639376835263,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.39183673469387753,
						"acc_stderr,none": 0.03125127591089165,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4631134221644459,
						"acc_stderr,none": 0.07534637607080809,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5870646766169154,
						"acc_stderr,none": 0.03481520803367348,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34760545512210606,
						"acc_stderr,none": 0.07312862053235213,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.037777988227480165,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6023391812865497,
						"acc_stderr,none": 0.037536389557616893,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7930718288334182,
						"acc_stderr,none": 0.004089244183100349,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.8124491456468673,
						"acc_stderr,none": 0.003936939750256032,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7745098039215687,
						"acc_stderr,none": 0.020714768648824097,
						"alias": "mrpc",
						"f1,none": 0.8481848184818482,
						"f1_stderr,none": 0.01569966689189495
					},
					"multimedqa": {
						"acc,none": 0.39048970901348473,
						"acc_norm,none": 0.3587351329877091,
						"acc_norm_stderr,none": 0.0001103090951982101,
						"acc_stderr,none": 0.08199898787604479,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5622937293729373,
						"acc_stderr,none": 0.00712584701954709,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7076749450802534,
						"mrr_stderr,none": 0.010314017834722279,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.4164785553047404,
						"r@2_stderr,none": 0.01657116712766196
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6586719353096749,
						"mrr_stderr,none": 0.010413744258480483,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.016773710557640355
					},
					"openbookqa": {
						"acc,none": 0.294,
						"acc_norm,none": 0.402,
						"acc_norm_stderr,none": 0.021948929609938612,
						"acc_stderr,none": 0.020395095484936614,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4385,
						"acc_stderr,none": 0.011098218786369077,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3945,
						"acc_stderr,none": 0.01093135958200793,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4005,
						"acc_stderr,none": 0.010959467594960344,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886322,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.01115079235234166,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5245,
						"acc_stderr,none": 0.011169702598013186,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4844285714285715,
						"acc_stderr,none": 0.05429494597872282,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7752992383025027,
						"acc_norm,none": 0.7769314472252449,
						"acc_norm_stderr,none": 0.009713057213018529,
						"acc_stderr,none": 0.009738282586548361,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23436165670367207,
						"acc_norm,none": 0.2756725021349274,
						"acc_norm_stderr,none": 0.0032646555169875877,
						"acc_stderr,none": 0.003094769217186555,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7380420589998629,
						"acc_norm,none": 0.592985728176822,
						"acc_norm_stderr,none": 0.00840798574418502,
						"acc_stderr,none": 0.15165565288003244,
						"alias": "pythia",
						"bits_per_byte,none": 0.6340000603197895,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551861773433815,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2825187815533297,
						"perplexity_stderr,none": 0.06331468171650306,
						"word_perplexity,none": 10.4852112832597,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.059833619700894396,
						"acc_stderr,none": 0.039172632951299885,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.038518021388670956,
						"acc_stderr,none": 0.038123743406448925,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.36619718309859156,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.02913837502274765,
						"acc_stderr,none": 0.02863791293383347,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.734232005936186,
						"acc_stderr,none": 0.0021969566903629516,
						"alias": "qqp",
						"f1,none": 0.7173037964692572,
						"f1_stderr,none": 0.002609759878787315
					},
					"race": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.014716858425461343,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2278,
						"em_stderr,none": 0.004194338475555284,
						"f1,none": 0.23686000022441148,
						"f1_stderr,none": 0.004214406398614817
					},
					"rte": {
						"acc,none": 0.7653429602888087,
						"acc_stderr,none": 0.025508815854976198,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.924,
						"acc_norm,none": 0.861,
						"acc_norm_stderr,none": 0.010945263761042965,
						"acc_stderr,none": 0.00838416926679639,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.7653429602888087,
						"acc_stderr,none": 0.025508815854976198,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8876146788990825,
						"acc_stderr,none": 0.010701827730093282,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5791262621213636,
						"acc_norm,none": 0.7722683195041488,
						"acc_norm_stderr,none": 0.002965012695529271,
						"acc_stderr,none": 0.0034905451946217097,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6697946823732988,
						"acc_stderr,none": 0.08612731890162503,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5605969551282052,
						"acc_stderr,none": 0.004967368314759427,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8928752407013276,
						"acc_stderr,none": 0.0031136497950160166,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5608823529411765,
						"acc_stderr,none": 0.004914140140548051,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34668011043267505,
						"acc_stderr,none": 0.0017250397595881194,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.36964504283965727,
						"bleu_acc_stderr,none": 0.016898180706973895,
						"bleu_diff,none": -4.614693101299436,
						"bleu_diff_stderr,none": 0.8504537836168142,
						"bleu_max,none": 25.862220055641405,
						"bleu_max_stderr,none": 0.7929138750930529,
						"rouge1_acc,none": 0.3317013463892289,
						"rouge1_acc_stderr,none": 0.016482148810241473,
						"rouge1_diff,none": -6.002948469529099,
						"rouge1_diff_stderr,none": 0.9675382290582,
						"rouge1_max,none": 51.14851831893734,
						"rouge1_max_stderr,none": 0.8589736557732265,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.385388894027129,
						"rouge2_diff_stderr,none": 1.1552207099752347,
						"rouge2_max,none": 35.25623300551201,
						"rouge2_max_stderr,none": 1.0158567172510493,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -6.25156253036408,
						"rougeL_diff_stderr,none": 0.9816664026630537,
						"rougeL_max,none": 48.200946760002516,
						"rougeL_max_stderr,none": 0.8811265093627599
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.36964504283965727,
						"bleu_acc_stderr,none": 0.016898180706973895,
						"bleu_diff,none": -4.614693101299436,
						"bleu_diff_stderr,none": 0.8504537836168142,
						"bleu_max,none": 25.862220055641405,
						"bleu_max_stderr,none": 0.7929138750930529,
						"rouge1_acc,none": 0.3317013463892289,
						"rouge1_acc_stderr,none": 0.016482148810241473,
						"rouge1_diff,none": -6.002948469529099,
						"rouge1_diff_stderr,none": 0.9675382290582,
						"rouge1_max,none": 51.14851831893734,
						"rouge1_max_stderr,none": 0.8589736557732265,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.385388894027129,
						"rouge2_diff_stderr,none": 1.1552207099752347,
						"rouge2_max,none": 35.25623300551201,
						"rouge2_max_stderr,none": 1.0158567172510493,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -6.25156253036408,
						"rougeL_diff_stderr,none": 0.9816664026630537,
						"rougeL_max,none": 48.200946760002516,
						"rougeL_max_stderr,none": 0.8811265093627599
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2692778457772338,
						"acc_stderr,none": 0.015528566637087283,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4240823750881163,
						"acc_stderr,none": 0.014554409852536549,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"wic": {
						"acc,none": 0.5799373040752351,
						"acc_stderr,none": 0.01955590253723442,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6340000603197895,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551861773433815,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.4852112832597,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7119179163378059,
						"acc_stderr,none": 0.012727884724248115,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.05961305784972239,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8534798534798534,
						"acc_stderr,none": 0.02144178105803405,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6216363636363635,
						"acc_stderr,none": 0.06984389251251383,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745186,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587574,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029004,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.021380042385946058,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724788,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43809906291834005,
						"acc_stderr,none": 0.04978173817572529,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4895582329317269,
						"acc_stderr,none": 0.010019887205677426,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4867469879518072,
						"acc_stderr,none": 0.010018551648218457,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38313253012048193,
						"acc_stderr,none": 0.009744464994287525,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5277108433734939,
						"acc_stderr,none": 0.010006669313970323,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438304,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115703,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829973,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044875,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4248995983935743,
						"acc_stderr,none": 0.009908377568198198,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46184738955823296,
						"acc_stderr,none": 0.00999285357974996,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044887,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40321285140562246,
						"acc_stderr,none": 0.009832511560868064,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.009593947957927137,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6296853378256423,
						"acc_stderr,none": 0.061607409074009495,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503923,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7776307081403044,
						"acc_stderr,none": 0.010701277694882511,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7160820648577101,
						"acc_stderr,none": 0.01160350886776313,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.012755046289912218,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600926538716082,
						"acc_stderr,none": 0.012602266005184312,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6677696889477167,
						"acc_stderr,none": 0.012121168923544609,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5407015221707479,
						"acc_stderr,none": 0.012824422739625592,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6803441429516877,
						"acc_stderr,none": 0.01200099306329728,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730203,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.012664648329214077,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6412971542025149,
						"acc_stderr,none": 0.012342655113112376,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8102944481906046,
						"acc_stderr,none": 0.03441658645048334,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8653763440860215,
						"acc_stderr,none": 0.00708019367710426,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7382690302398331,
						"acc_stderr,none": 0.0142020856634007,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7908745247148289,
						"acc_stderr,none": 0.025125031682933376,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7047619047619048,
						"acc_stderr,none": 0.02574201764583702,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7857142857142857,
						"acc_stderr,none": 0.018295527755776194,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.3473151972653468,
						"acc_stderr,none": 0.06723027102082926,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.33645058448459086,
						"acc_stderr,none": 0.07238334022440406,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.38010943031863537,
						"acc_stderr,none": 0.060150626992307374,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36951576210594733,
						"acc_stderr,none": 0.05233010781914716,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.309546463685379,
						"acc_stderr,none": 0.06632284612641635,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.4283276450511945,
						"acc_norm,none": 0.4786689419795222,
						"acc_norm_stderr,none": 0.014598087973127104,
						"acc_stderr,none": 0.014460496367599034,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.12661106899166036,
						"exact_match_stderr,get-answer": 0.00915971528308109
					},
					"hellaswag": {
						"acc,none": 0.5396335391356304,
						"acc_norm,none": 0.7338179645488947,
						"acc_norm_stderr,none": 0.004410573431837634,
						"acc_stderr,none": 0.004974080638364259,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.3473151972653468,
						"acc_stderr,none": 0.06723027102082926,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.039725528847851375,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3618421052631579,
						"acc_stderr,none": 0.03910525752849725,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.32075471698113206,
						"acc_stderr,none": 0.02872750295788026,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3611111111111111,
						"acc_stderr,none": 0.040166600304512336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952365,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.34104046242774566,
						"acc_stderr,none": 0.03614665424180826,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.18627450980392157,
						"acc_stderr,none": 0.03873958714149354,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3021276595744681,
						"acc_stderr,none": 0.030017554471880557,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.04227054451232199,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3793103448275862,
						"acc_stderr,none": 0.040434618619167466,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.02326651221373057,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.03852273364924316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4032258064516129,
						"acc_stderr,none": 0.02790615082604114,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2955665024630542,
						"acc_stderr,none": 0.032104944337514575,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.509090909090909,
						"acc_stderr,none": 0.039036986477484416,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3181818181818182,
						"acc_stderr,none": 0.03318477333845331,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.40414507772020725,
						"acc_stderr,none": 0.0354150857888402,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.023901157979402534,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959316,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.28991596638655465,
						"acc_stderr,none": 0.029472485833136094,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.03445406271987053,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.41284403669724773,
						"acc_stderr,none": 0.02110912813341391,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.027920963147993662,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.03460228327239171,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.510548523206751,
						"acc_stderr,none": 0.032539983791662855,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4170403587443946,
						"acc_stderr,none": 0.03309266936071721,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4198473282442748,
						"acc_stderr,none": 0.04328577215262972,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.33645058448459086,
						"acc_stderr,none": 0.07238334022440406,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3884297520661157,
						"acc_stderr,none": 0.04449270350068382,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.0471282125742677,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4294478527607362,
						"acc_stderr,none": 0.03889066619112722,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3392857142857143,
						"acc_stderr,none": 0.04493949068613539,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.42718446601941745,
						"acc_stderr,none": 0.04897957737781168,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.03275608910402091,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4495530012771392,
						"acc_stderr,none": 0.017788725283507337,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3670520231213873,
						"acc_stderr,none": 0.025950054337654085,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24581005586592178,
						"acc_stderr,none": 0.014400296429225615,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.33986928104575165,
						"acc_stderr,none": 0.027121956071388856,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.38010943031863537,
						"acc_stderr,none": 0.060150626992307374,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3504823151125402,
						"acc_stderr,none": 0.027098652621301744,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.026869490744815257,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3120567375886525,
						"acc_stderr,none": 0.027640120545169934,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.29465449804432853,
						"acc_stderr,none": 0.011643576764069545,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3382352941176471,
						"acc_stderr,none": 0.028739328513983576,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.01920660684882536,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661896,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.39183673469387753,
						"acc_stderr,none": 0.031251275910891656,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36951576210594733,
						"acc_stderr,none": 0.05233010781914716,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4228855721393035,
						"acc_stderr,none": 0.034932317774212816,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.309546463685379,
						"acc_stderr,none": 0.06632284612641635,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956911,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.27710843373493976,
						"acc_stderr,none": 0.03484331592680589,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.43859649122807015,
						"acc_stderr,none": 0.038057975055904594,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.42407801430742353,
						"acc_stderr,none": 0.014548558208859882,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.7308602999210734,
						"acc_stderr,none": 0.012464911951268734,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/EagleX_1-7T_Chat_pth"
	},
	"./rwkv-x-dev/Hermes-RWKV-v5-7B_pth": {
		"config": {
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.3166215638797892,
						"acc_stderr,none": 0.00388470559897429,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3141339001062699,
						"acc_stderr,none": 0.006704234651858884,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.36498229803669135,
						"acc_stderr,none": 0.00853170443009378,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3230419239519012,
						"acc_stderr,none": 0.008407738163570856,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2664129400570885,
						"acc_stderr,none": 0.007824542079053826,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.43600682593856654,
						"acc_norm,none": 0.4803754266211604,
						"acc_norm_stderr,none": 0.01460013207594709,
						"acc_stderr,none": 0.014491225699230916,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,flexible-extract": 0.22289613343442002,
						"exact_match,strict-match": 0.14404852160727824,
						"exact_match_stderr,flexible-extract": 0.011463918693740494,
						"exact_match_stderr,strict-match": 0.009672110973065275
					},
					"hellaswag": {
						"acc,none": 0.5372435769766979,
						"acc_norm,none": 0.7288388767177854,
						"acc_norm_stderr,none": 0.004436505187567006,
						"acc_stderr,none": 0.004975919665116542,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.3166215638797892,
						"acc_stderr,none": 0.00388470559897429,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.0391545063041425,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03459777606810535,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2943396226415094,
						"acc_stderr,none": 0.028049186315695245,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816508,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816508,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.03320556443085569,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376556,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.030976692998534443,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281335,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309994,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24338624338624337,
						"acc_stderr,none": 0.022101128787415426,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604674,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3935483870967742,
						"acc_stderr,none": 0.027791878753132264,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.20689655172413793,
						"acc_stderr,none": 0.02850137816789395,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.046482319871173156,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.03895658065271847,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03358618145732523,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.37305699481865284,
						"acc_stderr,none": 0.03490205592048574,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.0228158130988966,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.02578787422095931,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.02907937453948001,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3614678899082569,
						"acc_stderr,none": 0.020598082009937364,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18055555555555555,
						"acc_stderr,none": 0.026232878971491666,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3627450980392157,
						"acc_stderr,none": 0.03374499356319355,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4008438818565401,
						"acc_stderr,none": 0.031900803894732356,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4349775784753363,
						"acc_stderr,none": 0.03327283370271345,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3141339001062699,
						"acc_stderr,none": 0.006704234651858884,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4214876033057851,
						"acc_stderr,none": 0.045077322787750944,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.37962962962962965,
						"acc_stderr,none": 0.04691521224077742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4233128834355828,
						"acc_stderr,none": 0.03881891213334384,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915185,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.3106796116504854,
						"acc_stderr,none": 0.04582124160161551,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.452991452991453,
						"acc_stderr,none": 0.03261099873098619,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.45977011494252873,
						"acc_stderr,none": 0.017821994096933535,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.33815028901734107,
						"acc_stderr,none": 0.02546977014940017,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24692737430167597,
						"acc_stderr,none": 0.01442229220480884,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.32679738562091504,
						"acc_stderr,none": 0.026857294663281402,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.36498229803669135,
						"acc_stderr,none": 0.00853170443009378,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3440514469453376,
						"acc_stderr,none": 0.02698147804364802,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.025630824975621344,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.0258921511567094,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2711864406779661,
						"acc_stderr,none": 0.011354581451622985,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.027257202606114948,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.29901960784313725,
						"acc_stderr,none": 0.018521756215423024,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.04554619617541054,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2816326530612245,
						"acc_stderr,none": 0.028795185574291286,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3230419239519012,
						"acc_stderr,none": 0.008407738163570856,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3781094527363184,
						"acc_stderr,none": 0.034288678487786564,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2664129400570885,
						"acc_stderr,none": 0.007824542079053826,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.049888765156985884,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.37349397590361444,
						"acc_stderr,none": 0.03765845117168862,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4619883040935672,
						"acc_stderr,none": 0.038237270928823064,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4201545730278454,
						"acc_stderr,none": 0.014450400157651309,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.6953433307024467,
						"acc_stderr,none": 0.0129356464993253,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/Hermes-RWKV-v5-7B_pth"
	},
	"./rwkv-x-dev/Quetzal-N8-1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6226042841037204,
						"acc_norm,none": 0.6319052987598647,
						"acc_norm_stderr,none": 0.08884786132949501,
						"acc_stderr,none": 0.103225014469997,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.01538057765413352,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8365671641791045,
						"acc_stderr,none": 0.15150043819208528,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29839405974788474,
						"acc_norm,none": 0.29839405974788474,
						"acc_norm_stderr,none": 0.05359081167266252,
						"acc_stderr,none": 0.05359081167266252,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5093895114617711,
						"acc_stderr,none": 0.09360226157209069,
						"alias": "glue",
						"f1,none": 0.6334377064699201,
						"f1_stderr,none": 0.00031936062925547746,
						"mcc,none": 0.03640174058932784,
						"mcc_stderr,none": 0.0011625627927557005
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.02025464229078477,
						"alias": "lambada",
						"perplexity,none": 3.7142611589589056,
						"perplexity_stderr,none": 0.2798699028664125
					},
					"lambada_multilingual": {
						"acc,none": 0.5422860469629343,
						"acc_stderr,none": 0.0882549852322722,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.57589708252773,
						"perplexity_stderr,none": 8.64787284286979
					},
					"mmlu": {
						"acc,none": 0.3157669847600057,
						"acc_stderr,none": 0.053837382580476996,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31137088204038255,
						"acc_stderr,none": 0.05034350770386185,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.34985516575474734,
						"acc_stderr,none": 0.04341556800869804,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3357166070848229,
						"acc_stderr,none": 0.045722398803210426,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.269267364414843,
						"acc_stderr,none": 0.05591862906927716,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4567857142857143,
						"acc_stderr,none": 0.05475184126493959,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7402196165069074,
						"acc_norm,none": 0.6355504235769488,
						"acc_norm_stderr,none": 0.009836211175315098,
						"acc_stderr,none": 0.1447341919531007,
						"alias": "pythia",
						"bits_per_byte,none": 0.6422625445973058,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5607749675286307,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1790488450061614,
						"perplexity_stderr,none": 0.06259688004835295,
						"word_perplexity,none": 10.811293630189443,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3144972512040194,
						"acc_stderr,none": 0.0013827631630629258,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29865361077111385,
						"bleu_acc_stderr,none": 0.016021570613768545,
						"bleu_diff,none": -8.513877363517,
						"bleu_diff_stderr,none": 0.855612434003918,
						"bleu_max,none": 26.04555550224924,
						"bleu_max_stderr,none": 0.8072980593399709,
						"rouge1_acc,none": 0.2729498164014688,
						"rouge1_acc_stderr,none": 0.015594753632006518,
						"rouge1_diff,none": -10.736743776020543,
						"rouge1_diff_stderr,none": 0.9052863531466151,
						"rouge1_max,none": 50.45238788723216,
						"rouge1_max_stderr,none": 0.8982794935027536,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041843,
						"rouge2_diff,none": -12.957575841799134,
						"rouge2_diff_stderr,none": 1.0983477121627219,
						"rouge2_max,none": 34.336855746687114,
						"rouge2_max_stderr,none": 1.0262250402818764,
						"rougeL_acc,none": 0.2692778457772338,
						"rougeL_acc_stderr,none": 0.01552856663708729,
						"rougeL_diff,none": -11.034263329997641,
						"rougeL_diff_stderr,none": 0.9226370448066114,
						"rougeL_max,none": 47.57654323645,
						"rougeL_max_stderr,none": 0.9116157724908629
					},
					"xcopa": {
						"acc,none": 0.610909090909091,
						"acc_stderr,none": 0.07050016964403515,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4312182061579652,
						"acc_stderr,none": 0.052715768158511686,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6166295650081223,
						"acc_stderr,none": 0.06563719147480171,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8015284333558103,
						"acc_stderr,none": 0.038247518612836776,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6226042841037204,
						"acc_norm,none": 0.6319052987598647,
						"acc_norm_stderr,none": 0.08884786132949501,
						"acc_stderr,none": 0.103225014469997,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.01538057765413352,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407559,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.01497675877162034,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3491666666666667,
						"acc_stderr,none": 0.01376707539507725,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4044368600682594,
						"acc_norm,none": 0.4445392491467577,
						"acc_norm_stderr,none": 0.01452122640562707,
						"acc_stderr,none": 0.014342036483436177,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7302188552188552,
						"acc_norm,none": 0.7243265993265994,
						"acc_norm_stderr,none": 0.009169229476542577,
						"acc_stderr,none": 0.009107527914671064,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8365671641791045,
						"acc_stderr,none": 0.15150043819208528,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315109,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426618,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578141,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386702,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024963,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247102,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.0156679444881735,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.01293148186493802,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110866,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178353,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929385,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427421,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108663,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942319,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274701,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345674,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.013663187134877637,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937591,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737228,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244052,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.011743632866916149,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298263,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.297,
						"acc_stderr,none": 0.014456832294801105,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343968,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274536,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722694,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.01165726777130444,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323494,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024949,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295437,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524301,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722694,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524279,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.015773547629015113,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008205,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436965,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452374,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936717,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653867,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651533,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614753,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246438,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.00199699473909873,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621231,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.583,
						"acc_stderr,none": 0.015599819048769618,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081342,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.00891686663074591,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.002818500300504505,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.739,
						"acc_stderr,none": 0.013895037677965126,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904612,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.01486539538592836,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408021,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796561,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843981,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697057,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557412,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513065035,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410042,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.015372453034968528,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408945,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29839405974788474,
						"acc_norm,none": 0.29839405974788474,
						"acc_norm_stderr,none": 0.05359081167266252,
						"acc_stderr,none": 0.05359081167266252,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.037698374558241474,
						"acc_stderr,none": 0.037698374558241474,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.032839063537459336,
						"acc_stderr,none": 0.032839063537459336,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.23125,
						"acc_norm_stderr,none": 0.033437582657277434,
						"acc_stderr,none": 0.033437582657277434,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.04026377210787311,
						"acc_stderr,none": 0.04026377210787311,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.026038038744338656,
						"acc_stderr,none": 0.026038038744338656,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.30726256983240224,
						"acc_norm,none": 0.30726256983240224,
						"acc_norm_stderr,none": 0.03458033173302766,
						"acc_stderr,none": 0.03458033173302766,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.029312814153955924,
						"acc_stderr,none": 0.029312814153955924,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.04773249298367361,
						"acc_stderr,none": 0.04773249298367361,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.0401910747255735,
						"acc_stderr,none": 0.0401910747255735,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.044392639061996295,
						"acc_stderr,none": 0.044392639061996295,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.30036630036630035,
						"acc_norm,none": 0.30036630036630035,
						"acc_norm_stderr,none": 0.027795629283121376,
						"acc_stderr,none": 0.027795629283121376,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.03332139944668086,
						"acc_stderr,none": 0.03332139944668086,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.29239766081871343,
						"acc_norm,none": 0.29239766081871343,
						"acc_norm_stderr,none": 0.03488647713457922,
						"acc_stderr,none": 0.03488647713457922,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.034531515032766795,
						"acc_stderr,none": 0.034531515032766795,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871007,
						"acc_stderr,none": 0.03713396279871007,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.03642914578292405,
						"acc_stderr,none": 0.03642914578292405,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.03472469304477598,
						"acc_stderr,none": 0.03472469304477598,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.02827139981698855,
						"acc_stderr,none": 0.02827139981698855,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713548,
						"acc_stderr,none": 0.03191178226713548,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3949579831932773,
						"acc_norm,none": 0.3949579831932773,
						"acc_norm_stderr,none": 0.03175367846096625,
						"acc_stderr,none": 0.03175367846096625,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949527,
						"acc_stderr,none": 0.029322764228949527,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.03897077881510411,
						"acc_stderr,none": 0.03897077881510411,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3068181818181818,
						"acc_norm,none": 0.3068181818181818,
						"acc_norm_stderr,none": 0.03486142240553238,
						"acc_stderr,none": 0.03486142240553238,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3624161073825503,
						"acc_norm,none": 0.3624161073825503,
						"acc_norm_stderr,none": 0.03951314120201285,
						"acc_stderr,none": 0.03951314120201285,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101963,
						"acc_stderr,none": 0.03334150198101963,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.042227768322336275,
						"acc_stderr,none": 0.042227768322336275,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.036942843353378,
						"acc_stderr,none": 0.036942843353378,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.04163453031302859,
						"acc_stderr,none": 0.04163453031302859,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3027027027027027,
						"acc_norm,none": 0.3027027027027027,
						"acc_norm_stderr,none": 0.03386945658791666,
						"acc_stderr,none": 0.03386945658791666,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.32558139534883723,
						"acc_norm,none": 0.32558139534883723,
						"acc_norm_stderr,none": 0.03583410038767278,
						"acc_stderr,none": 0.03583410038767278,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995072,
						"acc_stderr,none": 0.022050254355995072,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.03315078506050491,
						"acc_stderr,none": 0.03315078506050491,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.040849837332392225,
						"acc_stderr,none": 0.040849837332392225,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.042941965824810475,
						"acc_stderr,none": 0.042941965824810475,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.032367278954043524,
						"acc_stderr,none": 0.032367278954043524,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067474,
						"acc_stderr,none": 0.04588414718067474,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.22285714285714286,
						"acc_norm,none": 0.22285714285714286,
						"acc_norm_stderr,none": 0.0315492532959613,
						"acc_stderr,none": 0.0315492532959613,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.03080829112478032,
						"acc_stderr,none": 0.03080829112478032,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.02243941258278639,
						"acc_stderr,none": 0.02243941258278639,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.032404930613749286,
						"acc_stderr,none": 0.032404930613749286,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03584022203803136,
						"acc_stderr,none": 0.03584022203803136,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066654,
						"acc_stderr,none": 0.03785714465066654,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326132,
						"acc_stderr,none": 0.031061820840326132,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.034277431758165236,
						"acc_stderr,none": 0.034277431758165236,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593337,
						"acc_stderr,none": 0.03253020905593337,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.035795265164562245,
						"acc_stderr,none": 0.035795265164562245,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3416149068322981,
						"acc_norm,none": 0.3416149068322981,
						"acc_norm_stderr,none": 0.03749284617282493,
						"acc_stderr,none": 0.03749284617282493,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.03640174058932784,
						"mcc_stderr,none": 0.034096375067676925
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5093895114617711,
						"acc_stderr,none": 0.09360226157209069,
						"alias": "glue",
						"f1,none": 0.6334377064699201,
						"f1_stderr,none": 0.00031936062925547746,
						"mcc,none": 0.03640174058932784,
						"mcc_stderr,none": 0.0011625627927557005
					},
					"hellaswag": {
						"acc,none": 0.5273849830711014,
						"acc_norm,none": 0.7071300537741486,
						"acc_norm_stderr,none": 0.004541492151639223,
						"acc_stderr,none": 0.004982291744069926,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.02025464229078477,
						"alias": "lambada",
						"perplexity,none": 3.7142611589589056,
						"perplexity_stderr,none": 0.2798699028664125
					},
					"lambada_multilingual": {
						"acc,none": 0.5422860469629343,
						"acc_stderr,none": 0.0882549852322722,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.57589708252773,
						"perplexity_stderr,none": 8.64787284286979
					},
					"lambada_openai": {
						"acc,none": 0.7556763050650107,
						"acc_stderr,none": 0.0059863591547287456,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1790488450061614,
						"perplexity_stderr,none": 0.06259688004835295
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4246070250339608,
						"acc_stderr,none": 0.006886331702011292,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.27995596877975,
						"perplexity_stderr,none": 2.0057097656963543
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7539297496603921,
						"acc_stderr,none": 0.0060007712081583,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.178695251072187,
						"perplexity_stderr,none": 0.06257055517964322
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4478944304288764,
						"acc_stderr,none": 0.006928049276239777,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 31.18708971811688,
						"perplexity_stderr,none": 1.5802795866360644
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5552105569571124,
						"acc_stderr,none": 0.006923379948184627,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.36012034244943,
						"perplexity_stderr,none": 0.8105072304807965
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5297884727343295,
						"acc_stderr,none": 0.006953604103874037,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.873624132220396,
						"perplexity_stderr,none": 1.1933478878798167
					},
					"lambada_standard": {
						"acc,none": 0.676693188433922,
						"acc_stderr,none": 0.006516515049707146,
						"alias": " - lambada_standard",
						"perplexity,none": 4.251759042799803,
						"perplexity_stderr,none": 0.09101393569051683
					},
					"logiqa": {
						"acc,none": 0.2411674347158218,
						"acc_norm,none": 0.2780337941628264,
						"acc_norm_stderr,none": 0.017573187770282706,
						"acc_stderr,none": 0.016779369344911064,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3157669847600057,
						"acc_stderr,none": 0.053837382580476996,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.03885004245800254,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3026315789473684,
						"acc_stderr,none": 0.03738520676119668,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.36981132075471695,
						"acc_stderr,none": 0.02971142188010792,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816507,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036844,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.03533133389323657,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.042663394431593935,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3586206896551724,
						"acc_stderr,none": 0.03996629574876718,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2566137566137566,
						"acc_stderr,none": 0.022494510767503154,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0404061017820884,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.36129032258064514,
						"acc_stderr,none": 0.027327548447957546,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.22660098522167488,
						"acc_stderr,none": 0.029454863835292975,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3575757575757576,
						"acc_stderr,none": 0.03742597043806585,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3787878787878788,
						"acc_stderr,none": 0.03456088731993747,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.41968911917098445,
						"acc_stderr,none": 0.03561587327685884,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2948717948717949,
						"acc_stderr,none": 0.02311936275823229,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.02564410863926761,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.029079374539480007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.033367670865679766,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3357798165137615,
						"acc_stderr,none": 0.020248081396752927,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.026491914727355168,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4264705882352941,
						"acc_stderr,none": 0.03471157907953425,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.34177215189873417,
						"acc_stderr,none": 0.030874537537553617,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.32061068702290074,
						"acc_stderr,none": 0.04093329229834278,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31137088204038255,
						"acc_stderr,none": 0.05034350770386185,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3140495867768595,
						"acc_stderr,none": 0.042369647530410184,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.046166311118017125,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3374233128834356,
						"acc_stderr,none": 0.037149084099355745,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467764,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.3592233009708738,
						"acc_stderr,none": 0.04750458399041694,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3974358974358974,
						"acc_stderr,none": 0.032059534537892925,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4086845466155811,
						"acc_stderr,none": 0.017579250148153397,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3554913294797688,
						"acc_stderr,none": 0.025770292082977254,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103986,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.02678745311190654,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.34985516575474734,
						"acc_stderr,none": 0.04341556800869804,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3954983922829582,
						"acc_stderr,none": 0.027770918531427838,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3395061728395062,
						"acc_stderr,none": 0.026348564412011635,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.28226857887874834,
						"acc_stderr,none": 0.01149585217624194,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3014705882352941,
						"acc_stderr,none": 0.027875982114273168,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3022875816993464,
						"acc_stderr,none": 0.01857923271111388,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3510204081632653,
						"acc_stderr,none": 0.030555316755573637,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3357166070848229,
						"acc_stderr,none": 0.045722398803210426,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.39303482587064675,
						"acc_stderr,none": 0.0345368246603156,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.269267364414843,
						"acc_stderr,none": 0.05591862906927716,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.30120481927710846,
						"acc_stderr,none": 0.0357160923005348,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4093567251461988,
						"acc_stderr,none": 0.037712831076265434,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3574121242995415,
						"acc_stderr,none": 0.004837576847532127,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3470301057770545,
						"acc_stderr,none": 0.004800995593412548,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7156862745098039,
						"acc_stderr,none": 0.022359549679883524,
						"alias": " - mrpc",
						"f1,none": 0.8263473053892215,
						"f1_stderr,none": 0.015892424268190306
					},
					"openbookqa": {
						"acc,none": 0.312,
						"acc_norm,none": 0.406,
						"acc_norm_stderr,none": 0.02198396209008634,
						"acc_stderr,none": 0.020740596536488062,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4125,
						"acc_stderr,none": 0.01101056271248756,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.356,
						"acc_stderr,none": 0.010709311120344539,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3895,
						"acc_stderr,none": 0.010906619649373086,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003717,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5195,
						"acc_stderr,none": 0.011174628009718142,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.481,
						"acc_stderr,none": 0.011175058879956061,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4935,
						"acc_stderr,none": 0.0111821910061423,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4567857142857143,
						"acc_stderr,none": 0.05475184126493959,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.764961915125136,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.009779850767847232,
						"acc_stderr,none": 0.009893146688805334,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7402196165069074,
						"acc_norm,none": 0.6355504235769488,
						"acc_norm_stderr,none": 0.009836211175315098,
						"acc_stderr,none": 0.1447341919531007,
						"alias": "pythia",
						"bits_per_byte,none": 0.6422625445973058,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5607749675286307,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1790488450061614,
						"perplexity_stderr,none": 0.06259688004835295,
						"word_perplexity,none": 10.811293630189443,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4966135822807981,
						"acc_stderr,none": 0.006765255380909209,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.576675735839723,
						"acc_stderr,none": 0.0024572872125706116,
						"alias": " - qqp",
						"f1,none": 0.6318008734376008,
						"f1_stderr,none": 0.00261189258077531
					},
					"record": {
						"alias": "record",
						"em,none": 0.2717,
						"em_stderr,none": 0.004448582919199869,
						"f1,none": 0.281435238340497,
						"f1_stderr,none": 0.004459404928595226
					},
					"rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.029660066290893485,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.945,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.007212976294639239,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9128440366972477,
						"acc_stderr,none": 0.009557356094989465,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3144972512040194,
						"acc_stderr,none": 0.0013827631630629258,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29865361077111385,
						"bleu_acc_stderr,none": 0.016021570613768545,
						"bleu_diff,none": -8.513877363517,
						"bleu_diff_stderr,none": 0.855612434003918,
						"bleu_max,none": 26.04555550224924,
						"bleu_max_stderr,none": 0.8072980593399709,
						"rouge1_acc,none": 0.2729498164014688,
						"rouge1_acc_stderr,none": 0.015594753632006518,
						"rouge1_diff,none": -10.736743776020543,
						"rouge1_diff_stderr,none": 0.9052863531466151,
						"rouge1_max,none": 50.45238788723216,
						"rouge1_max_stderr,none": 0.8982794935027536,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041843,
						"rouge2_diff,none": -12.957575841799134,
						"rouge2_diff_stderr,none": 1.0983477121627219,
						"rouge2_max,none": 34.336855746687114,
						"rouge2_max_stderr,none": 1.0262250402818764,
						"rougeL_acc,none": 0.2692778457772338,
						"rougeL_acc_stderr,none": 0.01552856663708729,
						"rougeL_diff,none": -11.034263329997641,
						"rougeL_diff_stderr,none": 0.9226370448066114,
						"rougeL_max,none": 47.57654323645,
						"rougeL_max_stderr,none": 0.9116157724908629
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.29865361077111385,
						"bleu_acc_stderr,none": 0.016021570613768545,
						"bleu_diff,none": -8.513877363517,
						"bleu_diff_stderr,none": 0.855612434003918,
						"bleu_max,none": 26.04555550224924,
						"bleu_max_stderr,none": 0.8072980593399709,
						"rouge1_acc,none": 0.2729498164014688,
						"rouge1_acc_stderr,none": 0.015594753632006518,
						"rouge1_diff,none": -10.736743776020543,
						"rouge1_diff_stderr,none": 0.9052863531466151,
						"rouge1_max,none": 50.45238788723216,
						"rouge1_max_stderr,none": 0.8982794935027536,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041843,
						"rouge2_diff,none": -12.957575841799134,
						"rouge2_diff_stderr,none": 1.0983477121627219,
						"rouge2_max,none": 34.336855746687114,
						"rouge2_max_stderr,none": 1.0262250402818764,
						"rougeL_acc,none": 0.2692778457772338,
						"rougeL_acc_stderr,none": 0.01552856663708729,
						"rougeL_diff,none": -11.034263329997641,
						"rougeL_diff_stderr,none": 0.9226370448066114,
						"rougeL_max,none": 47.57654323645,
						"rougeL_max_stderr,none": 0.9116157724908629
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2460220318237454,
						"acc_stderr,none": 0.015077219200662588,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3829724705842934,
						"acc_stderr,none": 0.013877948050264086,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6422625445973058,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5607749675286307,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.811293630189443,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6803472770323599,
						"acc_stderr,none": 0.013106528517665146,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.0590398420568258,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.38461538461538464,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.610909090909091,
						"acc_stderr,none": 0.07050016964403515,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503237,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.019780559675655486,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962132,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143025,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.02168382753928613,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.020950557312477455,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724795,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4312182061579652,
						"acc_stderr,none": 0.052715768158511686,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293536,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4714859437751004,
						"acc_stderr,none": 0.010005762674605288,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.491566265060241,
						"acc_stderr,none": 0.010020647068114176,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3819277108433735,
						"acc_stderr,none": 0.00973862791451752,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5373493975903615,
						"acc_stderr,none": 0.009994072620561407,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.009878474341822914,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4799196787148594,
						"acc_stderr,none": 0.010013987419234083,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495753,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3751004016064257,
						"acc_stderr,none": 0.009704349720814054,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46224899598393576,
						"acc_stderr,none": 0.009993466360872784,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39598393574297186,
						"acc_stderr,none": 0.009802809888502356,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.009895812914052197,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.00962025021776598,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6166295650081223,
						"acc_stderr,none": 0.06563719147480171,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5843812045003309,
						"acc_stderr,none": 0.012682569054907632,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7743216412971542,
						"acc_stderr,none": 0.010757644351686557,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7068166776968895,
						"acc_stderr,none": 0.011714791177625769,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5804103242885507,
						"acc_stderr,none": 0.012699642268200759,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6525479814692257,
						"acc_stderr,none": 0.012253641527935306,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5095962938451357,
						"acc_stderr,none": 0.01286475526040896,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6724023825281271,
						"acc_stderr,none": 0.01207804144298347,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126668,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5638649900727994,
						"acc_stderr,none": 0.01276173043143577,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139025,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8015284333558103,
						"acc_stderr,none": 0.038247518612836776,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8623655913978494,
						"acc_stderr,none": 0.007146463148274177,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7361835245046924,
						"acc_stderr,none": 0.014238401966368419,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.02577120320708472,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.026726874754294024,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7658730158730159,
						"acc_stderr,none": 0.01888078848507831,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/Quetzal-N8-1"
	},
	"./rwkv-x-dev/R4-1B5-No-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5146561443066516,
						"acc_norm,none": 0.4943630214205186,
						"acc_norm_stderr,none": 0.07766833744215614,
						"acc_stderr,none": 0.10900546412422053,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.341875,
						"acc_stderr,none": 0.015458296164085977,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8338507462686569,
						"acc_stderr,none": 0.1485411939935204,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.251683647038508,
						"acc_norm,none": 0.251683647038508,
						"acc_norm_stderr,none": 0.03986411235722884,
						"acc_stderr,none": 0.03986411235722884,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5245950452596473,
						"acc_stderr,none": 0.009993154159449867,
						"alias": "glue",
						"f1,none": 0.3795183073110856,
						"f1_stderr,none": 0.0002946596918553634,
						"mcc,none": 0.014578675424356067,
						"mcc_stderr,none": 0.030113358110865577
					},
					"lambada": {
						"acc,none": 0.6241024645837376,
						"acc_stderr,none": 0.022288483099121882,
						"alias": "lambada",
						"perplexity,none": 5.935217293691965,
						"perplexity_stderr,none": 0.5501859464304935
					},
					"lambada_multilingual": {
						"acc,none": 0.45429846691247816,
						"acc_stderr,none": 0.08524015745632813,
						"alias": "lambada_multilingual",
						"perplexity,none": 42.02546325237643,
						"perplexity_stderr,none": 16.27720474471234
					},
					"mmlu": {
						"acc,none": 0.26128756587380714,
						"acc_stderr,none": 0.04361422279534824,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24378320935175346,
						"acc_stderr,none": 0.027982215511159144,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25619568715803026,
						"acc_stderr,none": 0.059019917566149,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2850178745531362,
						"acc_stderr,none": 0.03579395330185572,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.269267364414843,
						"acc_stderr,none": 0.04691000557146146,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.5220714285714285,
						"acc_stderr,none": 0.028455702845987217,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.71852150640112,
						"acc_norm,none": 0.5016600030348698,
						"acc_norm_stderr,none": 0.008544119159139205,
						"acc_stderr,none": 0.1465878494359167,
						"alias": "pythia",
						"bits_per_byte,none": 0.721468813847825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.648859891417044,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.879813688154181,
						"perplexity_stderr,none": 0.11420191073163823,
						"word_perplexity,none": 14.500380668864032,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3069425669801873,
						"acc_stderr,none": 0.0017751398606458272,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31334149326805383,
						"bleu_acc_stderr,none": 0.016238065069059615,
						"bleu_diff,none": -5.375259049669378,
						"bleu_diff_stderr,none": 0.7532335432415257,
						"bleu_max,none": 23.004380203075616,
						"bleu_max_stderr,none": 0.7500613051567661,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219385,
						"rouge1_diff,none": -7.8523967712140825,
						"rouge1_diff_stderr,none": 0.8453782512061513,
						"rouge1_max,none": 46.94158422542939,
						"rouge1_max_stderr,none": 0.8732629724233246,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062133,
						"rouge2_diff,none": -9.417391598834786,
						"rouge2_diff_stderr,none": 0.9863539101388346,
						"rouge2_max,none": 30.313054257572936,
						"rouge2_max_stderr,none": 0.9761563529945856,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237276,
						"rougeL_diff,none": -7.933808504950493,
						"rougeL_diff_stderr,none": 0.8475940649487954,
						"rougeL_max,none": 44.24756523632106,
						"rougeL_max_stderr,none": 0.8849773323504441
					},
					"xcopa": {
						"acc,none": 0.5774545454545456,
						"acc_stderr,none": 0.045456488091206115,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4074698795180723,
						"acc_stderr,none": 0.04727570886141975,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5784850490343542,
						"acc_stderr,none": 0.05550458006042207,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7354461676781299,
						"acc_stderr,none": 0.04498732776408285,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5146561443066516,
						"acc_norm,none": 0.4943630214205186,
						"acc_norm_stderr,none": 0.07766833744215614,
						"acc_stderr,none": 0.10900546412422053,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.341875,
						"acc_stderr,none": 0.015458296164085977,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316405,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456725,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295755,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2841296928327645,
						"acc_norm,none": 0.3310580204778157,
						"acc_norm_stderr,none": 0.01375206241981783,
						"acc_stderr,none": 0.013179442447653887,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6283670033670034,
						"acc_norm,none": 0.5749158249158249,
						"acc_norm_stderr,none": 0.010143966195717842,
						"acc_stderr,none": 0.009915897123658791,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8338507462686569,
						"acc_stderr,none": 0.1485411939935204,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662728,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329811,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113125,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783212,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085341,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.013790038620872832,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767554,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337059,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380718,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.0061998740663370645,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315157,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910637,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855748,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.01270265158765513,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099182,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524317,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160331,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.291,
						"acc_stderr,none": 0.014370995982377947,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731987,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113471,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496497,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.015755101498347093,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746828,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.678,
						"acc_stderr,none": 0.014782913600996676,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.015649789644462214,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932577,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410036,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.01422115470843493,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248128,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651528,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.011743632866916159,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118783,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243775,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525052,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.01331455133593595,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734963,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745894,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482566024,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660006,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.015803979428161953,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774164,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462621,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992443,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785122,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438692,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235261,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295434,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340975,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099205,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337071,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.474,
						"acc_stderr,none": 0.01579789775804277,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.251683647038508,
						"acc_norm,none": 0.251683647038508,
						"acc_norm_stderr,none": 0.03986411235722884,
						"acc_stderr,none": 0.03986411235722884,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.032505932874173686,
						"acc_stderr,none": 0.032505932874173686,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.23648648648648649,
						"acc_norm,none": 0.23648648648648649,
						"acc_norm_stderr,none": 0.03504716241250434,
						"acc_stderr,none": 0.03504716241250434,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.03563788836258829,
						"acc_stderr,none": 0.03563788836258829,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.033464098810559534,
						"acc_stderr,none": 0.033464098810559534,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22966507177033493,
						"acc_norm,none": 0.22966507177033493,
						"acc_norm_stderr,none": 0.02916457171241446,
						"acc_stderr,none": 0.02916457171241446,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03921568627450977,
						"acc_stderr,none": 0.03921568627450977,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2476780185758514,
						"acc_norm,none": 0.2476780185758514,
						"acc_norm_stderr,none": 0.024055681892974835,
						"acc_stderr,none": 0.024055681892974835,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.22346368715083798,
						"acc_norm,none": 0.22346368715083798,
						"acc_norm_stderr,none": 0.031222980919579764,
						"acc_stderr,none": 0.031222980919579764,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.028304657943035296,
						"acc_stderr,none": 0.028304657943035296,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999062,
						"acc_stderr,none": 0.04350546818999062,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564394,
						"acc_stderr,none": 0.026350722655564394,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.03077855467869326,
						"acc_stderr,none": 0.03077855467869326,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.30409356725146197,
						"acc_norm,none": 0.30409356725146197,
						"acc_norm_stderr,none": 0.03528211258245232,
						"acc_stderr,none": 0.03528211258245232,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2014388489208633,
						"acc_norm,none": 0.2014388489208633,
						"acc_norm_stderr,none": 0.034141780162351726,
						"acc_stderr,none": 0.034141780162351726,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.03361670240809546,
						"acc_stderr,none": 0.03361670240809546,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.032583750685258935,
						"acc_stderr,none": 0.032583750685258935,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.24206349206349206,
						"acc_norm,none": 0.24206349206349206,
						"acc_norm_stderr,none": 0.02703610967923697,
						"acc_stderr,none": 0.02703610967923697,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23232323232323232,
						"acc_norm,none": 0.23232323232323232,
						"acc_norm_stderr,none": 0.030088629490217483,
						"acc_stderr,none": 0.030088629490217483,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2773109243697479,
						"acc_norm,none": 0.2773109243697479,
						"acc_norm_stderr,none": 0.029079374539480007,
						"acc_stderr,none": 0.029079374539480007,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623559,
						"acc_stderr,none": 0.03706860462623559,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.034847315046501876,
						"acc_stderr,none": 0.034847315046501876,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.0374425492857706,
						"acc_stderr,none": 0.0374425492857706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.04110070549339208,
						"acc_stderr,none": 0.04110070549339208,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566343,
						"acc_stderr,none": 0.03273897454566343,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940588,
						"acc_stderr,none": 0.04265792110940588,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.22162162162162163,
						"acc_norm,none": 0.22162162162162163,
						"acc_norm_stderr,none": 0.03061910799145734,
						"acc_stderr,none": 0.03061910799145734,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.03258375068525893,
						"acc_stderr,none": 0.03258375068525893,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.0214709918533983,
						"acc_stderr,none": 0.0214709918533983,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.02976139583743598,
						"acc_stderr,none": 0.02976139583743598,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888517,
						"acc_stderr,none": 0.03887917804888517,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.03914731903595733,
						"acc_stderr,none": 0.03914731903595733,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.029461344042368914,
						"acc_stderr,none": 0.029461344042368914,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736877,
						"acc_stderr,none": 0.03305282343736877,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03237307120120853,
						"acc_stderr,none": 0.03237307120120853,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.038061426873099935,
						"acc_stderr,none": 0.038061426873099935,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.039906571509931855,
						"acc_stderr,none": 0.039906571509931855,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2342857142857143,
						"acc_norm,none": 0.2342857142857143,
						"acc_norm_stderr,none": 0.0321093603969262,
						"acc_stderr,none": 0.0321093603969262,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.029737751726596824,
						"acc_stderr,none": 0.029737751726596824,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24202127659574468,
						"acc_norm,none": 0.24202127659574468,
						"acc_norm_stderr,none": 0.02211768392158699,
						"acc_stderr,none": 0.02211768392158699,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.22844827586206898,
						"acc_norm,none": 0.22844827586206898,
						"acc_norm_stderr,none": 0.027622976595715287,
						"acc_stderr,none": 0.027622976595715287,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890815,
						"acc_stderr,none": 0.028952167450890815,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.034277431758165236,
						"acc_stderr,none": 0.034277431758165236,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.034714607440589844,
						"acc_stderr,none": 0.034714607440589844,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.20625,
						"acc_norm,none": 0.20625,
						"acc_norm_stderr,none": 0.03208782538184615,
						"acc_stderr,none": 0.03208782538184615,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.014578675424356067,
						"mcc_stderr,none": 0.030113358110865577
					},
					"copa": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5245950452596473,
						"acc_stderr,none": 0.009993154159449867,
						"alias": "glue",
						"f1,none": 0.3795183073110856,
						"f1_stderr,none": 0.0002946596918553634,
						"mcc,none": 0.014578675424356067,
						"mcc_stderr,none": 0.030113358110865577
					},
					"hellaswag": {
						"acc,none": 0.424317864967138,
						"acc_norm,none": 0.5491933877713603,
						"acc_norm_stderr,none": 0.004965572246803866,
						"acc_stderr,none": 0.004932289405608944,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6241024645837376,
						"acc_stderr,none": 0.022288483099121882,
						"alias": "lambada",
						"perplexity,none": 5.935217293691965,
						"perplexity_stderr,none": 0.5501859464304935
					},
					"lambada_multilingual": {
						"acc,none": 0.45429846691247816,
						"acc_stderr,none": 0.08524015745632813,
						"alias": "lambada_multilingual",
						"perplexity,none": 42.02546325237643,
						"perplexity_stderr,none": 16.27720474471234
					},
					"lambada_openai": {
						"acc,none": 0.6666019794294585,
						"acc_stderr,none": 0.006567905278379238,
						"alias": " - lambada_openai",
						"perplexity,none": 4.879813688154181,
						"perplexity_stderr,none": 0.11420191073163823
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.35358043857946825,
						"acc_stderr,none": 0.006660601226776446,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 63.151901680710374,
						"perplexity_stderr,none": 3.770221365561435
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6666019794294585,
						"acc_stderr,none": 0.006567905278379236,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.878706840028871,
						"perplexity_stderr,none": 0.1141903216354531
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3679410052396662,
						"acc_stderr,none": 0.006718618417791317,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 62.15094979191024,
						"perplexity_stderr,none": 3.3919968469675297
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4566272074519697,
						"acc_stderr,none": 0.006939719384611018,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 33.022638428860276,
						"perplexity_stderr,none": 1.7648309265482065
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4267417038618281,
						"acc_stderr,none": 0.0068908023083824,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 46.92311952037235,
						"perplexity_stderr,none": 2.7113934248733935
					},
					"lambada_standard": {
						"acc,none": 0.5816029497380167,
						"acc_stderr,none": 0.0068725780402739975,
						"alias": " - lambada_standard",
						"perplexity,none": 6.99175178638294,
						"perplexity_stderr,none": 0.18489814891631345
					},
					"logiqa": {
						"acc,none": 0.24423963133640553,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399497,
						"acc_stderr,none": 0.016851689430077556,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.26128756587380714,
						"acc_stderr,none": 0.04361422279534824,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.037857144650666544,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3026315789473684,
						"acc_stderr,none": 0.03738520676119668,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3132075471698113,
						"acc_stderr,none": 0.02854479331905533,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03745554791462457,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.34104046242774566,
						"acc_stderr,none": 0.03614665424180826,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.04755129616062946,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.0261488180184245,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.0414243971948936,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2620689655172414,
						"acc_stderr,none": 0.036646663372252565,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2830687830687831,
						"acc_stderr,none": 0.023201392938194974,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.039701582732351734,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.27419354838709675,
						"acc_stderr,none": 0.025378139970885196,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.031270907132976984,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23030303030303031,
						"acc_stderr,none": 0.0328766675860349,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.32323232323232326,
						"acc_stderr,none": 0.03332299921070644,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.29533678756476683,
						"acc_stderr,none": 0.03292296639155141,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31025641025641026,
						"acc_stderr,none": 0.023454674889404295,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.026202766534652148,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.030283995525884396,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2980132450331126,
						"acc_stderr,none": 0.03734535676787198,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.28623853211009176,
						"acc_stderr,none": 0.019379436628919965,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.30092592592592593,
						"acc_stderr,none": 0.03128039084329882,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.03132179803083291,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.24472573839662448,
						"acc_stderr,none": 0.027985699387036427,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.14349775784753363,
						"acc_stderr,none": 0.02352937126961819,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2366412213740458,
						"acc_stderr,none": 0.037276735755969195,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24378320935175346,
						"acc_stderr,none": 0.027982215511159144,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.15702479338842976,
						"acc_stderr,none": 0.0332124484254713,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.0395783547198098,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.24539877300613497,
						"acc_stderr,none": 0.03380939813943354,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.19642857142857142,
						"acc_stderr,none": 0.03770970049347019,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.048657775704107696,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2264957264957265,
						"acc_stderr,none": 0.027421007295392923,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.21583652618135377,
						"acc_stderr,none": 0.014711684386139932,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.23410404624277456,
						"acc_stderr,none": 0.022797110278071145,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961464,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2875816993464052,
						"acc_stderr,none": 0.02591780611714716,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25619568715803026,
						"acc_stderr,none": 0.059019917566149,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2572347266881029,
						"acc_stderr,none": 0.024826171289250888,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.23765432098765432,
						"acc_stderr,none": 0.023683591837008564,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2127659574468085,
						"acc_stderr,none": 0.024414612974307703,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25488917861799215,
						"acc_stderr,none": 0.011130509812662977,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3602941176470588,
						"acc_stderr,none": 0.029163128570670736,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.01740181671142766,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04265792110940589,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.32653061224489793,
						"acc_stderr,none": 0.030021056238440307,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2850178745531362,
						"acc_stderr,none": 0.03579395330185572,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2736318407960199,
						"acc_stderr,none": 0.03152439186555402,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.269267364414843,
						"acc_stderr,none": 0.04691000557146146,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.1927710843373494,
						"acc_stderr,none": 0.030709824050565267,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.19883040935672514,
						"acc_stderr,none": 0.030611116557432528,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3574121242995415,
						"acc_stderr,none": 0.00483757684753212,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34316517493897475,
						"acc_stderr,none": 0.004788294368437754,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.3553921568627451,
						"acc_stderr,none": 0.02372490639698968,
						"alias": " - mrpc",
						"f1,none": 0.2006079027355623,
						"f1_stderr,none": 0.02948403117568553
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.002770083102493075,
						"exact_match_stderr,remove_whitespace": 0.0008748842684970852
					},
					"openbookqa": {
						"acc,none": 0.258,
						"acc_norm,none": 0.364,
						"acc_norm_stderr,none": 0.021539170637317695,
						"acc_stderr,none": 0.019586711785215837,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4905,
						"acc_stderr,none": 0.011181117282805231,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4615,
						"acc_stderr,none": 0.01114993432795706,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.5365,
						"acc_stderr,none": 0.011153298751334332,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943093,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.011120131683767747,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.011175886999478619,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5425,
						"acc_stderr,none": 0.011142663706548619,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5220714285714285,
						"acc_stderr,none": 0.028455702845987217,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7138193688792165,
						"acc_norm,none": 0.7181719260065288,
						"acc_norm_stderr,none": 0.01049667523125816,
						"acc_stderr,none": 0.010545318576106643,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.71852150640112,
						"acc_norm,none": 0.5016600030348698,
						"acc_norm_stderr,none": 0.008544119159139205,
						"acc_stderr,none": 0.1465878494359167,
						"alias": "pythia",
						"bits_per_byte,none": 0.721468813847825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.648859891417044,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.879813688154181,
						"perplexity_stderr,none": 0.11420191073163823,
						"word_perplexity,none": 14.500380668864032,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5048508145707487,
						"acc_stderr,none": 0.006765092158624681,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6079891169923324,
						"acc_stderr,none": 0.0024280099665805253,
						"alias": " - qqp",
						"f1,none": 0.38106767680712306,
						"f1_stderr,none": 0.003857116090409578
					},
					"record": {
						"alias": "record",
						"em,none": 0.2742,
						"em_stderr,none": 0.00446132562616015,
						"f1,none": 0.28280523830354215,
						"f1_stderr,none": 0.0044697291154412325
					},
					"rte": {
						"acc,none": 0.5018050541516246,
						"acc_stderr,none": 0.030096267148976633,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.901,
						"acc_norm,none": 0.867,
						"acc_norm_stderr,none": 0.010743669132397342,
						"acc_stderr,none": 0.009449248027662739,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7970183486238532,
						"acc_stderr,none": 0.013628669913308692,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3069425669801873,
						"acc_stderr,none": 0.0017751398606458272,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31334149326805383,
						"bleu_acc_stderr,none": 0.016238065069059615,
						"bleu_diff,none": -5.375259049669378,
						"bleu_diff_stderr,none": 0.7532335432415257,
						"bleu_max,none": 23.004380203075616,
						"bleu_max_stderr,none": 0.7500613051567661,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219385,
						"rouge1_diff,none": -7.8523967712140825,
						"rouge1_diff_stderr,none": 0.8453782512061513,
						"rouge1_max,none": 46.94158422542939,
						"rouge1_max_stderr,none": 0.8732629724233246,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062133,
						"rouge2_diff,none": -9.417391598834786,
						"rouge2_diff_stderr,none": 0.9863539101388346,
						"rouge2_max,none": 30.313054257572936,
						"rouge2_max_stderr,none": 0.9761563529945856,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237276,
						"rougeL_diff,none": -7.933808504950493,
						"rougeL_diff_stderr,none": 0.8475940649487954,
						"rougeL_max,none": 44.24756523632106,
						"rougeL_max_stderr,none": 0.8849773323504441
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31334149326805383,
						"bleu_acc_stderr,none": 0.016238065069059615,
						"bleu_diff,none": -5.375259049669378,
						"bleu_diff_stderr,none": 0.7532335432415257,
						"bleu_max,none": 23.004380203075616,
						"bleu_max_stderr,none": 0.7500613051567661,
						"rouge1_acc,none": 0.2741738066095471,
						"rouge1_acc_stderr,none": 0.015616518497219385,
						"rouge1_diff,none": -7.8523967712140825,
						"rouge1_diff_stderr,none": 0.8453782512061513,
						"rouge1_max,none": 46.94158422542939,
						"rouge1_max_stderr,none": 0.8732629724233246,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062133,
						"rouge2_diff,none": -9.417391598834786,
						"rouge2_diff_stderr,none": 0.9863539101388346,
						"rouge2_max,none": 30.313054257572936,
						"rouge2_max_stderr,none": 0.9761563529945856,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237276,
						"rougeL_diff,none": -7.933808504950493,
						"rougeL_diff_stderr,none": 0.8475940649487954,
						"rougeL_max,none": 44.24756523632106,
						"rougeL_max_stderr,none": 0.8849773323504441
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22766217870257038,
						"acc_stderr,none": 0.014679255032111066,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38622295525780426,
						"acc_stderr,none": 0.01379975691821399,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.721468813847825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.648859891417044,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.500380668864032,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5990528808208366,
						"acc_stderr,none": 0.01377397455494803,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.05975550263548289,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.4326923076923077,
						"acc_stderr,none": 0.048818036870061955,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.5774545454545456,
						"acc_stderr,none": 0.045456488091206115,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143015,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.021487751089720526,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.02156427685020162,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269955,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.021660710347204484,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4074698795180723,
						"acc_stderr,none": 0.04727570886141975,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.43172690763052207,
						"acc_stderr,none": 0.009928203186112917,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.45863453815261046,
						"acc_stderr,none": 0.009987716412406571,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3714859437751004,
						"acc_stderr,none": 0.009685370571886117,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5140562248995983,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.45823293172690766,
						"acc_stderr,none": 0.009987044882812574,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4614457831325301,
						"acc_stderr,none": 0.009992234275993066,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3783132530120482,
						"acc_stderr,none": 0.009720735678557153,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.45502008032128516,
						"acc_stderr,none": 0.009981437307797264,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3437751004016064,
						"acc_stderr,none": 0.00952031050288293,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3963855421686747,
						"acc_stderr,none": 0.009804518520476658,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906756,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361544,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.00990591824499448,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3502008032128514,
						"acc_stderr,none": 0.00956171303816195,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5784850490343542,
						"acc_stderr,none": 0.05550458006042207,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5314361350099271,
						"acc_stderr,none": 0.012841668760976907,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7213765718067505,
						"acc_stderr,none": 0.011537224908075907,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6267372600926538,
						"acc_stderr,none": 0.012446911553527134,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5327597617471873,
						"acc_stderr,none": 0.012839477563855926,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.542686962276638,
						"acc_stderr,none": 0.01282014720425624,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6214427531436135,
						"acc_stderr,none": 0.012481818770003606,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.012865364020375405,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6187954996690933,
						"acc_stderr,none": 0.012498678850934097,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.01286238758665008,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.012743443034698402,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389871,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7354461676781299,
						"acc_stderr,none": 0.04498732776408285,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8129032258064516,
						"acc_stderr,none": 0.0080897390246567,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6131386861313869,
						"acc_stderr,none": 0.01573527205814044,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6653992395437263,
						"acc_stderr,none": 0.029151034153310378,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.026786851659200927,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7003968253968254,
						"acc_stderr,none": 0.020424963888406058,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/R4-1B5-With-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5143742953776775,
						"acc_norm,none": 0.49577226606538893,
						"acc_norm_stderr,none": 0.0779270848687408,
						"acc_stderr,none": 0.10608357002651421,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.338125,
						"acc_stderr,none": 0.015795876368770643,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.834044776119403,
						"acc_stderr,none": 0.1486902680408596,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2520290105335865,
						"acc_norm,none": 0.2520290105335865,
						"acc_norm_stderr,none": 0.03982393185641713,
						"acc_stderr,none": 0.03982393185641713,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5223320628870891,
						"acc_stderr,none": 0.009416411982736139,
						"alias": "glue",
						"f1,none": 0.43414811698961037,
						"f1_stderr,none": 0.000354350107398487,
						"mcc,none": 0.013952647384210289,
						"mcc_stderr,none": 0.030274423637635228
					},
					"lambada": {
						"acc,none": 0.623035125169804,
						"acc_stderr,none": 0.022428336704811407,
						"alias": "lambada",
						"perplexity,none": 5.959663214521177,
						"perplexity_stderr,none": 0.5583040352382076
					},
					"lambada_multilingual": {
						"acc,none": 0.45243547448088495,
						"acc_stderr,none": 0.08555420204631338,
						"alias": "lambada_multilingual",
						"perplexity,none": 42.19501337611109,
						"perplexity_stderr,none": 16.347052934156604
					},
					"mmlu": {
						"acc,none": 0.26192850021364483,
						"acc_stderr,none": 0.04361304463996174,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2463336875664187,
						"acc_stderr,none": 0.028230860495196464,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25555197940135177,
						"acc_stderr,none": 0.0598280484378345,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.28501787455313615,
						"acc_stderr,none": 0.03617568514286364,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2689502061528704,
						"acc_stderr,none": 0.045880966709267286,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.5197142857142858,
						"acc_stderr,none": 0.029959301710343635,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7184053757437052,
						"acc_norm,none": 0.5029584596896837,
						"acc_norm_stderr,none": 0.00852840229158927,
						"acc_stderr,none": 0.14676731790570477,
						"alias": "pythia",
						"bits_per_byte,none": 0.7214621063785158,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6488522254508533,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.888536161067676,
						"perplexity_stderr,none": 0.11449593232837851,
						"word_perplexity,none": 14.500020169133325,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.30919078754499657,
						"acc_stderr,none": 0.001768209334740202,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323016,
						"bleu_diff,none": -5.356411909597049,
						"bleu_diff_stderr,none": 0.7441352853582645,
						"bleu_max,none": 22.58088264622209,
						"bleu_max_stderr,none": 0.7430416673569418,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.0155507783328429,
						"rouge1_diff,none": -7.893976907400489,
						"rouge1_diff_stderr,none": 0.8501987046287365,
						"rouge1_max,none": 46.41737588456714,
						"rouge1_max_stderr,none": 0.8769187951240192,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731618,
						"rouge2_diff,none": -9.44019531244224,
						"rouge2_diff_stderr,none": 0.9852642576547831,
						"rouge2_max,none": 29.77842751142627,
						"rouge2_max_stderr,none": 0.9730406055244815,
						"rougeL_acc,none": 0.26438188494492043,
						"rougeL_acc_stderr,none": 0.015438211119522517,
						"rougeL_diff,none": -8.02646858058999,
						"rougeL_diff_stderr,none": 0.8527645832343129,
						"rougeL_max,none": 43.720025060819225,
						"rougeL_max_stderr,none": 0.886414072611252
					},
					"xcopa": {
						"acc,none": 0.5780000000000001,
						"acc_stderr,none": 0.045385626894231174,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4060508701472557,
						"acc_stderr,none": 0.04685035504508295,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5793875218097587,
						"acc_stderr,none": 0.05600076349145379,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7356709372892785,
						"acc_stderr,none": 0.04481749630313415,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5143742953776775,
						"acc_norm,none": 0.49577226606538893,
						"acc_norm_stderr,none": 0.0779270848687408,
						"acc_stderr,none": 0.10608357002651421,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.338125,
						"acc_stderr,none": 0.015795876368770643,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.015080663991563098,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.324,
						"acc_stderr,none": 0.014806864733738864,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3408333333333333,
						"acc_stderr,none": 0.013688600793296937,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2901023890784983,
						"acc_norm,none": 0.3319112627986348,
						"acc_norm_stderr,none": 0.013760988200880533,
						"acc_stderr,none": 0.013261573677520759,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.625,
						"acc_norm,none": 0.5765993265993266,
						"acc_norm_stderr,none": 0.010138671005289054,
						"acc_stderr,none": 0.009933992677987828,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.834044776119403,
						"acc_stderr,none": 0.1486902680408596,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340976,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329811,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655137,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787723,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444231,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.013790038620872832,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632164,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.002818500300504507,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118772,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584947,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244071,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942297,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611461,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024394,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175304,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409286,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695801,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676766,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.289,
						"acc_stderr,none": 0.014341711358296195,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323487,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731987,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.014470846741134712,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448795,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406727,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.01574623586588068,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644608,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.672,
						"acc_stderr,none": 0.014853842487270334,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577799,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928357,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275284,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.01419015011761203,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248132,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745908,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181305,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426139,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329829,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.01333479721693644,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.015790799515836763,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695456,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592072,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503012,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577981,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.477,
						"acc_stderr,none": 0.015802554246726098,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378232,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656604,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.014046255632633916,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345679,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967222,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706819,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525057,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584934,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.01580397942816194,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.015594460144140605,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2520290105335865,
						"acc_norm,none": 0.2520290105335865,
						"acc_norm_stderr,none": 0.03982393185641713,
						"acc_stderr,none": 0.03982393185641713,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.03220965704514523,
						"acc_stderr,none": 0.03220965704514523,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22972972972972974,
						"acc_norm,none": 0.22972972972972974,
						"acc_norm_stderr,none": 0.03469536825407608,
						"acc_stderr,none": 0.03469536825407608,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.035637888362588285,
						"acc_stderr,none": 0.035637888362588285,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22966507177033493,
						"acc_norm,none": 0.22966507177033493,
						"acc_norm_stderr,none": 0.029164571712414468,
						"acc_stderr,none": 0.029164571712414468,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.26717557251908397,
						"acc_norm,none": 0.26717557251908397,
						"acc_norm_stderr,none": 0.03880848301082394,
						"acc_stderr,none": 0.03880848301082394,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.03861882389311725,
						"acc_stderr,none": 0.03861882389311725,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2476780185758514,
						"acc_norm,none": 0.2476780185758514,
						"acc_norm_stderr,none": 0.024055681892974835,
						"acc_stderr,none": 0.024055681892974835,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.22346368715083798,
						"acc_norm,none": 0.22346368715083798,
						"acc_norm_stderr,none": 0.031222980919579764,
						"acc_stderr,none": 0.031222980919579764,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.26851851851851855,
						"acc_norm,none": 0.26851851851851855,
						"acc_norm_stderr,none": 0.04284467968052192,
						"acc_stderr,none": 0.04284467968052192,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999062,
						"acc_stderr,none": 0.04350546818999062,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.03077855467869326,
						"acc_stderr,none": 0.03077855467869326,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.03508771929824563,
						"acc_stderr,none": 0.03508771929824563,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2014388489208633,
						"acc_norm,none": 0.2014388489208633,
						"acc_norm_stderr,none": 0.034141780162351726,
						"acc_stderr,none": 0.034141780162351726,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2389937106918239,
						"acc_norm,none": 0.2389937106918239,
						"acc_norm_stderr,none": 0.033928043452896316,
						"acc_stderr,none": 0.033928043452896316,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.26380368098159507,
						"acc_norm,none": 0.26380368098159507,
						"acc_norm_stderr,none": 0.03462419931615623,
						"acc_stderr,none": 0.03462419931615623,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.032583750685258935,
						"acc_stderr,none": 0.032583750685258935,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.026569137736133546,
						"acc_stderr,none": 0.026569137736133546,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.03074630074212449,
						"acc_stderr,none": 0.03074630074212449,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2773109243697479,
						"acc_norm,none": 0.2773109243697479,
						"acc_norm_stderr,none": 0.029079374539480007,
						"acc_stderr,none": 0.029079374539480007,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.03484731504650188,
						"acc_stderr,none": 0.03484731504650188,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.032505932874173686,
						"acc_stderr,none": 0.032505932874173686,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2627118644067797,
						"acc_norm,none": 0.2627118644067797,
						"acc_norm_stderr,none": 0.04068792432070351,
						"acc_stderr,none": 0.04068792432070351,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566343,
						"acc_stderr,none": 0.03273897454566343,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053241,
						"acc_stderr,none": 0.03535681229053241,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928704,
						"acc_stderr,none": 0.039325376803928704,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.23783783783783785,
						"acc_norm,none": 0.23783783783783785,
						"acc_norm_stderr,none": 0.0313873936833048,
						"acc_stderr,none": 0.0313873936833048,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.22674418604651161,
						"acc_norm,none": 0.22674418604651161,
						"acc_norm_stderr,none": 0.03202075899584939,
						"acc_stderr,none": 0.03202075899584939,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583275,
						"acc_stderr,none": 0.021605737836583275,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2570093457943925,
						"acc_norm,none": 0.2570093457943925,
						"acc_norm_stderr,none": 0.02994169153324464,
						"acc_stderr,none": 0.02994169153324464,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.23577235772357724,
						"acc_norm,none": 0.23577235772357724,
						"acc_norm_stderr,none": 0.038430664952148384,
						"acc_stderr,none": 0.038430664952148384,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.029461344042368914,
						"acc_stderr,none": 0.029461344042368914,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736877,
						"acc_stderr,none": 0.03305282343736877,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2751322751322751,
						"acc_norm,none": 0.2751322751322751,
						"acc_norm_stderr,none": 0.032570260086303135,
						"acc_stderr,none": 0.032570260086303135,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.038552896163789485,
						"acc_stderr,none": 0.038552896163789485,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.03990657150993186,
						"acc_stderr,none": 0.03990657150993186,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2342857142857143,
						"acc_norm,none": 0.2342857142857143,
						"acc_norm_stderr,none": 0.0321093603969262,
						"acc_stderr,none": 0.0321093603969262,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.029737751726596824,
						"acc_stderr,none": 0.029737751726596824,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2393617021276596,
						"acc_norm,none": 0.2393617021276596,
						"acc_norm_stderr,none": 0.022034377848093516,
						"acc_stderr,none": 0.022034377848093516,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23706896551724138,
						"acc_norm,none": 0.23706896551724138,
						"acc_norm_stderr,none": 0.02798169400862498,
						"acc_stderr,none": 0.02798169400862498,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543969,
						"acc_stderr,none": 0.03279424038543969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.029118495998237286,
						"acc_stderr,none": 0.029118495998237286,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055952,
						"acc_stderr,none": 0.03346409881055952,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548533,
						"acc_stderr,none": 0.03433919627548533,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.035231683977370906,
						"acc_stderr,none": 0.035231683977370906,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.013952647384210289,
						"mcc_stderr,none": 0.030274423637635228
					},
					"copa": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.04163331998932261,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5223320628870891,
						"acc_stderr,none": 0.009416411982736139,
						"alias": "glue",
						"f1,none": 0.43414811698961037,
						"f1_stderr,none": 0.000354350107398487,
						"mcc,none": 0.013952647384210289,
						"mcc_stderr,none": 0.030274423637635228
					},
					"hellaswag": {
						"acc,none": 0.42421828321051586,
						"acc_norm,none": 0.5490938060147381,
						"acc_norm_stderr,none": 0.0049656703981273516,
						"acc_stderr,none": 0.0049321371266254094,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.623035125169804,
						"acc_stderr,none": 0.022428336704811407,
						"alias": "lambada",
						"perplexity,none": 5.959663214521177,
						"perplexity_stderr,none": 0.5583040352382076
					},
					"lambada_multilingual": {
						"acc,none": 0.45243547448088495,
						"acc_stderr,none": 0.08555420204631338,
						"alias": "lambada_multilingual",
						"perplexity,none": 42.19501337611109,
						"perplexity_stderr,none": 16.347052934156604
					},
					"lambada_openai": {
						"acc,none": 0.6650494857364642,
						"acc_stderr,none": 0.006575509026332652,
						"alias": " - lambada_openai",
						"perplexity,none": 4.888536161067676,
						"perplexity_stderr,none": 0.11449593232837851
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3524160683097225,
						"acc_stderr,none": 0.00665561138214351,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 63.357032632967375,
						"perplexity_stderr,none": 3.7801856742801543
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6656316708713371,
						"acc_stderr,none": 0.006572666992809176,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.8867879861519405,
						"perplexity_stderr,none": 0.11441582153648308
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.36425383271880457,
						"acc_stderr,none": 0.006704339729528897,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 62.482170248077246,
						"perplexity_stderr,none": 3.409477239406046
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.45449252862410244,
						"acc_stderr,none": 0.006937065554202134,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 33.139547912515134,
						"perplexity_stderr,none": 1.7714567883450667
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.425383271880458,
						"acc_stderr,none": 0.006887972570117886,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 47.109528100843775,
						"perplexity_stderr,none": 2.720651652983326
					},
					"lambada_standard": {
						"acc,none": 0.5802445177566466,
						"acc_stderr,none": 0.006875682029552121,
						"alias": " - lambada_standard",
						"perplexity,none": 7.032647664975357,
						"perplexity_stderr,none": 0.18605678900018505
					},
					"logiqa": {
						"acc,none": 0.23655913978494625,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.017886249734104385,
						"acc_stderr,none": 0.016668667667174196,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.26192850021364483,
						"acc_stderr,none": 0.04361304463996174,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.037857144650666544,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3132075471698113,
						"acc_stderr,none": 0.02854479331905533,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2847222222222222,
						"acc_stderr,none": 0.03773809990686935,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3468208092485549,
						"acc_stderr,none": 0.036291466701596636,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.3627450980392157,
						"acc_stderr,none": 0.04784060704105652,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.20425531914893616,
						"acc_stderr,none": 0.026355158413349414,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.036951833116502325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.28835978835978837,
						"acc_stderr,none": 0.023330654054535892,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.03932537680392871,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2806451612903226,
						"acc_stderr,none": 0.025560604721022895,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03144712581678242,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2545454545454545,
						"acc_stderr,none": 0.03401506715249039,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.32323232323232326,
						"acc_stderr,none": 0.03332299921070644,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.30569948186528495,
						"acc_stderr,none": 0.033248379397581594,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31025641025641026,
						"acc_stderr,none": 0.02345467488940429,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.0263357394040558,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.030283995525884396,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526733,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.29174311926605506,
						"acc_stderr,none": 0.019489300968876532,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03054674526495318,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2696078431372549,
						"acc_stderr,none": 0.031145570659486782,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2489451476793249,
						"acc_stderr,none": 0.028146970599422644,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.14349775784753363,
						"acc_stderr,none": 0.02352937126961819,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2366412213740458,
						"acc_stderr,none": 0.037276735755969195,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2463336875664187,
						"acc_stderr,none": 0.028230860495196464,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.1487603305785124,
						"acc_stderr,none": 0.032484700838071943,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.0401910747255735,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615624,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.03894641120044792,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.048657775704107696,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.027236013946196694,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2120051085568327,
						"acc_stderr,none": 0.014616099385833704,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2398843930635838,
						"acc_stderr,none": 0.02298959254312357,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961459,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.026090162504279032,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25555197940135177,
						"acc_stderr,none": 0.0598280484378345,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2572347266881029,
						"acc_stderr,none": 0.024826171289250888,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.24691358024691357,
						"acc_stderr,none": 0.023993501709042107,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2198581560283688,
						"acc_stderr,none": 0.02470614107070548,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25488917861799215,
						"acc_stderr,none": 0.011130509812662977,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.34558823529411764,
						"acc_stderr,none": 0.02888819310398864,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.017322789207784326,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2636363636363636,
						"acc_stderr,none": 0.04220224692971987,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.30612244897959184,
						"acc_stderr,none": 0.029504896454595968,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.28501787455313615,
						"acc_stderr,none": 0.03617568514286364,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.26865671641791045,
						"acc_stderr,none": 0.03134328358208954,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2689502061528704,
						"acc_stderr,none": 0.045880966709267286,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421296,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.19879518072289157,
						"acc_stderr,none": 0.03106939026078942,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2046783625730994,
						"acc_stderr,none": 0.030944459778533207,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3563932755985736,
						"acc_stderr,none": 0.004834504961378275,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34947111472742065,
						"acc_stderr,none": 0.004808837339702902,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.35049019607843135,
						"acc_stderr,none": 0.023650133032612784,
						"alias": " - mrpc",
						"f1,none": 0.23631123919308358,
						"f1_stderr,none": 0.030240429419107217
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.002770083102493075,
						"exact_match_stderr,remove_whitespace": 0.0008748842684970852
					},
					"openbookqa": {
						"acc,none": 0.252,
						"acc_norm,none": 0.362,
						"acc_norm_stderr,none": 0.021513662527582404,
						"acc_stderr,none": 0.01943572728224953,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4835,
						"acc_stderr,none": 0.011177045144808304,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4575,
						"acc_stderr,none": 0.01114266370654862,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01116020945760289,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.011124809242874423,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5555,
						"acc_stderr,none": 0.011114028784284505,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.011177408788874894,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5425,
						"acc_stderr,none": 0.011142663706548617,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5197142857142858,
						"acc_stderr,none": 0.029959301710343635,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7105549510337323,
						"acc_norm,none": 0.7154515778019587,
						"acc_norm_stderr,none": 0.010527218464130617,
						"acc_stderr,none": 0.010581014740675607,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7184053757437052,
						"acc_norm,none": 0.5029584596896837,
						"acc_norm_stderr,none": 0.00852840229158927,
						"acc_stderr,none": 0.14676731790570477,
						"alias": "pythia",
						"bits_per_byte,none": 0.7214621063785158,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6488522254508533,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.888536161067676,
						"perplexity_stderr,none": 0.11449593232837851,
						"word_perplexity,none": 14.500020169133325,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5053999633900788,
						"acc_stderr,none": 0.00676501598687746,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6029680930002473,
						"acc_stderr,none": 0.0024333992557865223,
						"alias": " - qqp",
						"f1,none": 0.4358613903141913,
						"f1_stderr,none": 0.0036705542343576023
					},
					"record": {
						"alias": "record",
						"em,none": 0.2756,
						"em_stderr,none": 0.0044683845678350305,
						"f1,none": 0.28402190496623514,
						"f1_stderr,none": 0.004476397153117576
					},
					"rte": {
						"acc,none": 0.51985559566787,
						"acc_stderr,none": 0.030072723167317184,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.9,
						"acc_norm,none": 0.866,
						"acc_norm_stderr,none": 0.01077776229836968,
						"acc_stderr,none": 0.00949157995752506,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7935779816513762,
						"acc_stderr,none": 0.013713987123090356,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.30919078754499657,
						"acc_stderr,none": 0.001768209334740202,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323016,
						"bleu_diff,none": -5.356411909597049,
						"bleu_diff_stderr,none": 0.7441352853582645,
						"bleu_max,none": 22.58088264622209,
						"bleu_max_stderr,none": 0.7430416673569418,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.0155507783328429,
						"rouge1_diff,none": -7.893976907400489,
						"rouge1_diff_stderr,none": 0.8501987046287365,
						"rouge1_max,none": 46.41737588456714,
						"rouge1_max_stderr,none": 0.8769187951240192,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731618,
						"rouge2_diff,none": -9.44019531244224,
						"rouge2_diff_stderr,none": 0.9852642576547831,
						"rouge2_max,none": 29.77842751142627,
						"rouge2_max_stderr,none": 0.9730406055244815,
						"rougeL_acc,none": 0.26438188494492043,
						"rougeL_acc_stderr,none": 0.015438211119522517,
						"rougeL_diff,none": -8.02646858058999,
						"rougeL_diff_stderr,none": 0.8527645832343129,
						"rougeL_max,none": 43.720025060819225,
						"rougeL_max_stderr,none": 0.886414072611252
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3072215422276622,
						"bleu_acc_stderr,none": 0.016150201321323016,
						"bleu_diff,none": -5.356411909597049,
						"bleu_diff_stderr,none": 0.7441352853582645,
						"bleu_max,none": 22.58088264622209,
						"bleu_max_stderr,none": 0.7430416673569418,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.0155507783328429,
						"rouge1_diff,none": -7.893976907400489,
						"rouge1_diff_stderr,none": 0.8501987046287365,
						"rouge1_max,none": 46.41737588456714,
						"rouge1_max_stderr,none": 0.8769187951240192,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731618,
						"rouge2_diff,none": -9.44019531244224,
						"rouge2_diff_stderr,none": 0.9852642576547831,
						"rouge2_max,none": 29.77842751142627,
						"rouge2_max_stderr,none": 0.9730406055244815,
						"rougeL_acc,none": 0.26438188494492043,
						"rougeL_acc_stderr,none": 0.015438211119522517,
						"rougeL_diff,none": -8.02646858058999,
						"rougeL_diff_stderr,none": 0.8527645832343129,
						"rougeL_max,none": 43.720025060819225,
						"rougeL_max_stderr,none": 0.886414072611252
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23011015911872704,
						"acc_stderr,none": 0.014734557959807763,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3882714159712661,
						"acc_stderr,none": 0.013812079719880825,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.7214621063785158,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6488522254508533,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.500020169133325,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5990528808208366,
						"acc_stderr,none": 0.013773974554948032,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.4807692307692308,
						"acc_stderr,none": 0.0492300107297805,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.5780000000000001,
						"acc_stderr,none": 0.045385626894231174,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.021487751089720526,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317688,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.02231133324528967,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.021683827539286122,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4060508701472557,
						"acc_stderr,none": 0.04685035504508295,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3321285140562249,
						"acc_stderr,none": 0.009440328001240639,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.43092369477911646,
						"acc_stderr,none": 0.009925970741520651,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.45502008032128516,
						"acc_stderr,none": 0.009981437307797271,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3678714859437751,
						"acc_stderr,none": 0.009665811145047429,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5144578313253012,
						"acc_stderr,none": 0.010017882185606019,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4530120481927711,
						"acc_stderr,none": 0.009977719904353734,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4634538152610442,
						"acc_stderr,none": 0.009995265580368916,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.37349397590361444,
						"acc_stderr,none": 0.00969598596221976,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.44779116465863456,
						"acc_stderr,none": 0.009967287545636121,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3465863453815261,
						"acc_stderr,none": 0.009538660220458996,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673288,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.39759036144578314,
						"acc_stderr,none": 0.009809602996075811,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.009517658993060707,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42811244979919677,
						"acc_stderr,none": 0.009917947421067469,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301817,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5793875218097587,
						"acc_stderr,none": 0.05600076349145379,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5294506949040371,
						"acc_stderr,none": 0.012844785490016995,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7246856386499008,
						"acc_stderr,none": 0.011494783262044594,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6267372600926538,
						"acc_stderr,none": 0.012446911553527134,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5334215751158173,
						"acc_stderr,none": 0.01283834793473167,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481956,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6214427531436135,
						"acc_stderr,none": 0.012481818770003606,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.012865364020375405,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6201191264063534,
						"acc_stderr,none": 0.012490298101718164,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5175380542686963,
						"acc_stderr,none": 0.012859207453266304,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.57114493712773,
						"acc_stderr,none": 0.012736202713147777,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389871,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7356709372892785,
						"acc_stderr,none": 0.04481749630313415,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.813763440860215,
						"acc_stderr,none": 0.008075389803169178,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6120959332638165,
						"acc_stderr,none": 0.015743060297873944,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6692015209125475,
						"acc_stderr,none": 0.02906762615931534,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.02666555933592601,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.020463437846223783,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-1B5-With-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-15t-32k-No-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6113303269447576,
						"acc_norm,none": 0.5947012401352875,
						"acc_norm_stderr,none": 0.0914072707939698,
						"acc_stderr,none": 0.11191553108836015,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.354375,
						"acc_stderr,none": 0.014808102548524724,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8178059701492537,
						"acc_stderr,none": 0.16254668811826864,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2914867898463133,
						"acc_norm,none": 0.2914867898463133,
						"acc_norm_stderr,none": 0.05100450359980866,
						"acc_stderr,none": 0.05100450359980866,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5083075273939971,
						"acc_stderr,none": 0.008916698851211186,
						"alias": "glue",
						"f1,none": 0.6335365536327022,
						"f1_stderr,none": 0.0003919156544367018,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"lambada": {
						"acc,none": 0.7028915195032021,
						"acc_stderr,none": 0.017134301697928622,
						"alias": "lambada",
						"perplexity,none": 4.0119246187848905,
						"perplexity_stderr,none": 0.2781222263917229
					},
					"lambada_multilingual": {
						"acc,none": 0.5292062875994566,
						"acc_stderr,none": 0.08517139509231783,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.8345824619413,
						"perplexity_stderr,none": 8.555112998148484
					},
					"mmlu": {
						"acc,none": 0.2774533542230451,
						"acc_stderr,none": 0.042627388024387494,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.26652497343251863,
						"acc_stderr,none": 0.032506806012049,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.30511747666559386,
						"acc_stderr,none": 0.045966178946316015,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27169320766980826,
						"acc_stderr,none": 0.0411451349628729,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2721217887725976,
						"acc_stderr,none": 0.04769578954597017,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47778571428571437,
						"acc_stderr,none": 0.054437902674784384,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7192790812663622,
						"acc_norm,none": 0.5994166679345544,
						"acc_norm_stderr,none": 0.010413355052982578,
						"acc_stderr,none": 0.15245467393927561,
						"alias": "pythia",
						"bits_per_byte,none": 0.6353454668720816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533096600527558,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4839615256227563,
						"perplexity_stderr,none": 0.06825539068259785,
						"word_perplexity,none": 10.537629989702536,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3105985610824635,
						"acc_stderr,none": 0.0014562887019219548,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487360995,
						"bleu_diff,none": -8.973939239409965,
						"bleu_diff_stderr,none": 0.8752505933373075,
						"bleu_max,none": 26.825281557575725,
						"bleu_max_stderr,none": 0.8078042522947256,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.330034156766095,
						"rouge1_diff_stderr,none": 0.9208464330116674,
						"rouge1_max,none": 51.80548782028358,
						"rouge1_max_stderr,none": 0.8848949049709126,
						"rouge2_acc,none": 0.23378212974296206,
						"rouge2_acc_stderr,none": 0.014816195991931591,
						"rouge2_diff,none": -13.679565380481936,
						"rouge2_diff_stderr,none": 1.1193687089506261,
						"rouge2_max,none": 35.61355443275106,
						"rouge2_max_stderr,none": 1.0327595203212836,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.749506150553147,
						"rougeL_diff_stderr,none": 0.9309650546800619,
						"rougeL_max,none": 48.91216063127378,
						"rougeL_max_stderr,none": 0.905142600084208
					},
					"xcopa": {
						"acc,none": 0.6243636363636362,
						"acc_stderr,none": 0.0712582463409568,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4375368139223561,
						"acc_stderr,none": 0.05306464453652511,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6250526442452319,
						"acc_stderr,none": 0.06339913543183494,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.814789840413576,
						"acc_stderr,none": 0.03434770530799896,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6113303269447576,
						"acc_norm,none": 0.5947012401352875,
						"acc_norm_stderr,none": 0.0914072707939698,
						"acc_stderr,none": 0.11191553108836015,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.354375,
						"acc_stderr,none": 0.014808102548524724,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.01516792886540756,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.356,
						"acc_stderr,none": 0.015149042659306628,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.013774667009018552,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.37457337883959047,
						"acc_norm,none": 0.40187713310580203,
						"acc_norm_stderr,none": 0.014327268614578274,
						"acc_stderr,none": 0.014144193471893433,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7281144781144782,
						"acc_norm,none": 0.6898148148148148,
						"acc_norm_stderr,none": 0.009491721291998515,
						"acc_stderr,none": 0.009129795867310492,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8178059701492537,
						"acc_stderr,none": 0.16254668811826864,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621216,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029993,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298384,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812185,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602496,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438671,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.01576602573788216,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.01319083007236446,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333316,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403623,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557433,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380712,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033842,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866446,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306494,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904624,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.014174516461485253,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598121,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286416,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234396,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910639,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.273,
						"acc_stderr,none": 0.014095022868717595,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151098,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816324,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177441,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397335,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323497,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651528,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968514,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037193,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.015743152379585536,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154624,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.015685057252717204,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653874,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968519,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662716,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.00961683333969579,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557415,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557814,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099173,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817136,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240803,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403623,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490535,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.447,
						"acc_stderr,none": 0.01573017604600905,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752505,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.01087884871433331,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528036,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.01267910721461733,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.0074548356504067275,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315158,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178358,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.393,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361425,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2914867898463133,
						"acc_norm,none": 0.2914867898463133,
						"acc_norm_stderr,none": 0.05100450359980866,
						"acc_stderr,none": 0.05100450359980866,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620372,
						"acc_stderr,none": 0.03794062549620372,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3875,
						"acc_norm,none": 0.3875,
						"acc_norm_stderr,none": 0.038635838122414064,
						"acc_stderr,none": 0.038635838122414064,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.03646204963253812,
						"acc_stderr,none": 0.03646204963253812,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.03260698244181308,
						"acc_stderr,none": 0.03260698244181308,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.039800662464677665,
						"acc_stderr,none": 0.039800662464677665,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30959752321981426,
						"acc_norm,none": 0.30959752321981426,
						"acc_norm_stderr,none": 0.025764515105490108,
						"acc_stderr,none": 0.025764515105490108,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647555,
						"acc_stderr,none": 0.03182231867647555,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2681564245810056,
						"acc_norm,none": 0.2681564245810056,
						"acc_norm_stderr,none": 0.03320421630673714,
						"acc_stderr,none": 0.03320421630673714,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.270042194092827,
						"acc_norm,none": 0.270042194092827,
						"acc_norm_stderr,none": 0.028900721906293426,
						"acc_stderr,none": 0.028900721906293426,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16037735849056603,
						"acc_norm,none": 0.16037735849056603,
						"acc_norm_stderr,none": 0.03581120619691076,
						"acc_stderr,none": 0.03581120619691076,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.047304390228528934,
						"acc_stderr,none": 0.047304390228528934,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.03826076324884864,
						"acc_stderr,none": 0.03826076324884864,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.032566854844603886,
						"acc_stderr,none": 0.032566854844603886,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.033773102522091945,
						"acc_stderr,none": 0.033773102522091945,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.037387423042158106,
						"acc_stderr,none": 0.037387423042158106,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.038819561267357076,
						"acc_stderr,none": 0.038819561267357076,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871007,
						"acc_stderr,none": 0.03713396279871007,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3006134969325153,
						"acc_norm,none": 0.3006134969325153,
						"acc_norm_stderr,none": 0.03602511318806771,
						"acc_stderr,none": 0.03602511318806771,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365904,
						"acc_stderr,none": 0.030954055470365904,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3949579831932773,
						"acc_norm,none": 0.3949579831932773,
						"acc_norm_stderr,none": 0.03175367846096625,
						"acc_stderr,none": 0.03175367846096625,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079126,
						"acc_stderr,none": 0.03344352850079126,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2627118644067797,
						"acc_norm,none": 0.2627118644067797,
						"acc_norm_stderr,none": 0.04068792432070351,
						"acc_stderr,none": 0.04068792432070351,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.04013964554072773,
						"acc_stderr,none": 0.04013964554072773,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.037649508797906066,
						"acc_stderr,none": 0.037649508797906066,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617759,
						"acc_stderr,none": 0.03231470996617759,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871637,
						"acc_stderr,none": 0.03615263198871637,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2944038929440389,
						"acc_norm,none": 0.2944038929440389,
						"acc_norm_stderr,none": 0.022509089804193687,
						"acc_stderr,none": 0.022509089804193687,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.032790101746569884,
						"acc_stderr,none": 0.032790101746569884,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3170731707317073,
						"acc_norm,none": 0.3170731707317073,
						"acc_norm_stderr,none": 0.04212955964853051,
						"acc_stderr,none": 0.04212955964853051,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.03155253554505398,
						"acc_stderr,none": 0.03155253554505398,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2804232804232804,
						"acc_norm,none": 0.2804232804232804,
						"acc_norm_stderr,none": 0.03276171742795849,
						"acc_stderr,none": 0.03276171742795849,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180182,
						"acc_stderr,none": 0.04244626443180182,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.0451367671816831,
						"acc_stderr,none": 0.0451367671816831,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752743,
						"acc_stderr,none": 0.03424737867752743,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.02259355080105626,
						"acc_stderr,none": 0.02259355080105626,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03087984562096084,
						"acc_stderr,none": 0.03087984562096084,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.0365392361546597,
						"acc_stderr,none": 0.0365392361546597,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.29646017699115046,
						"acc_norm,none": 0.29646017699115046,
						"acc_norm_stderr,none": 0.03044642219079463,
						"acc_stderr,none": 0.03044642219079463,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2919254658385093,
						"acc_norm,none": 0.2919254658385093,
						"acc_norm_stderr,none": 0.03594309636716404,
						"acc_stderr,none": 0.03594309636716404,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018759,
						"acc_stderr,none": 0.03489370652018759,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5083075273939971,
						"acc_stderr,none": 0.008916698851211186,
						"alias": "glue",
						"f1,none": 0.6335365536327022,
						"f1_stderr,none": 0.0003919156544367018,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"hellaswag": {
						"acc,none": 0.5252937661820355,
						"acc_norm,none": 0.7090221071499702,
						"acc_norm_stderr,none": 0.004532850566893532,
						"acc_stderr,none": 0.004983392650570965,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7028915195032021,
						"acc_stderr,none": 0.017134301697928622,
						"alias": "lambada",
						"perplexity,none": 4.0119246187848905,
						"perplexity_stderr,none": 0.2781222263917229
					},
					"lambada_multilingual": {
						"acc,none": 0.5292062875994566,
						"acc_stderr,none": 0.08517139509231783,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.8345824619413,
						"perplexity_stderr,none": 8.555112998148484
					},
					"lambada_openai": {
						"acc,none": 0.7349117019212109,
						"acc_stderr,none": 0.0061492894021581606,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4839615256227563,
						"perplexity_stderr,none": 0.06825539068259785
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41529206287599457,
						"acc_stderr,none": 0.006865282027647141,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.723089010140434,
						"perplexity_stderr,none": 1.9403832389612334
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7347176402095866,
						"acc_stderr,none": 0.006150727583054028,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4814307451517883,
						"perplexity_stderr,none": 0.06814444041730382
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.44459538133126336,
						"acc_stderr,none": 0.006923079035848192,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.391300987301697,
						"perplexity_stderr,none": 1.4777111126297038
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5458955947991462,
						"acc_stderr,none": 0.006936569231082093,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.889164782536024,
						"perplexity_stderr,none": 0.8064949743881313
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5055307587812925,
						"acc_stderr,none": 0.00696555147549591,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.687926784576565,
						"perplexity_stderr,none": 1.1936556627523554
					},
					"lambada_standard": {
						"acc,none": 0.6710653987968174,
						"acc_stderr,none": 0.006545597195850582,
						"alias": " - lambada_standard",
						"perplexity,none": 4.541704235524136,
						"perplexity_stderr,none": 0.09855031862569001
					},
					"logiqa": {
						"acc,none": 0.22734254992319508,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.017512971782225214,
						"acc_stderr,none": 0.016439067675117762,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2774533542230451,
						"acc_stderr,none": 0.042627388024387494,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.039725528847851355,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.26973684210526316,
						"acc_stderr,none": 0.03611780560284898,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.33584905660377357,
						"acc_stderr,none": 0.02906722014664483,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2916666666666667,
						"acc_stderr,none": 0.03800968060554858,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.21965317919075145,
						"acc_stderr,none": 0.031568093627031744,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376556,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3191489361702128,
						"acc_stderr,none": 0.030472973363380045,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.0409698513984367,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03724563619774632,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643898,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.34516129032258064,
						"acc_stderr,none": 0.027045746573534327,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03010833071801162,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.0347769116216366,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.29292929292929293,
						"acc_stderr,none": 0.03242497958178817,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.35233160621761656,
						"acc_stderr,none": 0.03447478286414357,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23846153846153847,
						"acc_stderr,none": 0.021606294494647727,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712177,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.02851025151234193,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27339449541284405,
						"acc_stderr,none": 0.0191092998460983,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19444444444444445,
						"acc_stderr,none": 0.02699145450203672,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.0309645179269234,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.27848101265822783,
						"acc_stderr,none": 0.029178682304842548,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.039153454088478354,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.26652497343251863,
						"acc_stderr,none": 0.032506806012049,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.04320767807536671,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.0452459600703005,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.294478527607362,
						"acc_stderr,none": 0.03581165790474082,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3392857142857143,
						"acc_stderr,none": 0.04493949068613539,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.31196581196581197,
						"acc_stderr,none": 0.030351527323344937,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.36015325670498083,
						"acc_stderr,none": 0.017166362471369292,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.024105712607754307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303679,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.024954184324879898,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.30511747666559386,
						"acc_stderr,none": 0.045966178946316015,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2990353697749196,
						"acc_stderr,none": 0.02600330111788514,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.29012345679012347,
						"acc_stderr,none": 0.025251173936495026,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2503259452411995,
						"acc_stderr,none": 0.011064151027165433,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23897058823529413,
						"acc_stderr,none": 0.02590528064489301,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.018054027458815198,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.37272727272727274,
						"acc_stderr,none": 0.046313813194254635,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20816326530612245,
						"acc_stderr,none": 0.025991117672813296,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27169320766980826,
						"acc_stderr,none": 0.0411451349628729,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2885572139303483,
						"acc_stderr,none": 0.03203841040213322,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2721217887725976,
						"acc_stderr,none": 0.04769578954597017,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3132530120481928,
						"acc_stderr,none": 0.03610805018031023,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.03508771929824563,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3398879266428935,
						"acc_stderr,none": 0.004781384619510542,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3417412530512612,
						"acc_stderr,none": 0.004783526399082717,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7647058823529411,
						"acc_stderr,none": 0.02102594605453768,
						"alias": " - mrpc",
						"f1,none": 0.8456591639871383,
						"f1_stderr,none": 0.015590523708891517
					},
					"openbookqa": {
						"acc,none": 0.308,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982786,
						"acc_stderr,none": 0.0206670329874661,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.427,
						"acc_stderr,none": 0.011063304133448202,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3695,
						"acc_stderr,none": 0.010795515113846488,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.424,
						"acc_stderr,none": 0.011053193499766086,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5445,
						"acc_stderr,none": 0.011138757154883975,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5255,
						"acc_stderr,none": 0.01116858288333007,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5075,
						"acc_stderr,none": 0.011181877847485998,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47778571428571437,
						"acc_stderr,none": 0.054437902674784384,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7584330794341676,
						"acc_norm,none": 0.76550598476605,
						"acc_norm_stderr,none": 0.00988520314324055,
						"acc_stderr,none": 0.009986718001804472,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7192790812663622,
						"acc_norm,none": 0.5994166679345544,
						"acc_norm_stderr,none": 0.010413355052982578,
						"acc_stderr,none": 0.15245467393927561,
						"alias": "pythia",
						"bits_per_byte,none": 0.6353454668720816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533096600527558,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4839615256227563,
						"perplexity_stderr,none": 0.06825539068259785,
						"word_perplexity,none": 10.537629989702536,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5021050704740985,
						"acc_stderr,none": 0.006765350592089545,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5786544645065546,
						"acc_stderr,none": 0.0024557398218259963,
						"alias": " - qqp",
						"f1,none": 0.6316995654335935,
						"f1_stderr,none": 0.002623074187044246
					},
					"record": {
						"alias": "record",
						"em,none": 0.269,
						"em_stderr,none": 0.004434621357755189,
						"f1,none": 0.27970523836314676,
						"f1_stderr,none": 0.0044472754376228465
					},
					"rte": {
						"acc,none": 0.6245487364620939,
						"acc_stderr,none": 0.029147775180820408,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.911,
						"acc_norm_stderr,none": 0.009008893392651554,
						"acc_stderr,none": 0.007274401481697049,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9013761467889908,
						"acc_stderr,none": 0.010102641365451148,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3105985610824635,
						"acc_stderr,none": 0.0014562887019219548,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487360995,
						"bleu_diff,none": -8.973939239409965,
						"bleu_diff_stderr,none": 0.8752505933373075,
						"bleu_max,none": 26.825281557575725,
						"bleu_max_stderr,none": 0.8078042522947256,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.330034156766095,
						"rouge1_diff_stderr,none": 0.9208464330116674,
						"rouge1_max,none": 51.80548782028358,
						"rouge1_max_stderr,none": 0.8848949049709126,
						"rouge2_acc,none": 0.23378212974296206,
						"rouge2_acc_stderr,none": 0.014816195991931591,
						"rouge2_diff,none": -13.679565380481936,
						"rouge2_diff_stderr,none": 1.1193687089506261,
						"rouge2_max,none": 35.61355443275106,
						"rouge2_max_stderr,none": 1.0327595203212836,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.749506150553147,
						"rougeL_diff_stderr,none": 0.9309650546800619,
						"rougeL_max,none": 48.91216063127378,
						"rougeL_max_stderr,none": 0.905142600084208
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2974296205630355,
						"bleu_acc_stderr,none": 0.016002651487360995,
						"bleu_diff,none": -8.973939239409965,
						"bleu_diff_stderr,none": 0.8752505933373075,
						"bleu_max,none": 26.825281557575725,
						"bleu_max_stderr,none": 0.8078042522947256,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.330034156766095,
						"rouge1_diff_stderr,none": 0.9208464330116674,
						"rouge1_max,none": 51.80548782028358,
						"rouge1_max_stderr,none": 0.8848949049709126,
						"rouge2_acc,none": 0.23378212974296206,
						"rouge2_acc_stderr,none": 0.014816195991931591,
						"rouge2_diff,none": -13.679565380481936,
						"rouge2_diff_stderr,none": 1.1193687089506261,
						"rouge2_max,none": 35.61355443275106,
						"rouge2_max_stderr,none": 1.0327595203212836,
						"rougeL_acc,none": 0.25458996328029376,
						"rougeL_acc_stderr,none": 0.015250117079156496,
						"rougeL_diff,none": -11.749506150553147,
						"rougeL_diff_stderr,none": 0.9309650546800619,
						"rougeL_max,none": 48.91216063127378,
						"rougeL_max_stderr,none": 0.905142600084208
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23990208078335373,
						"acc_stderr,none": 0.01494881267906214,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3812950413815732,
						"acc_stderr,none": 0.013741776166772184,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6353454668720816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533096600527558,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.537629989702536,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6827150749802684,
						"acc_stderr,none": 0.01308059841133212,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899505,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.4807692307692308,
						"acc_stderr,none": 0.049230010729780505,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6243636363636362,
						"acc_stderr,none": 0.0712582463409568,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.020011219298073535,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.021564276850201614,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.020271503835075217,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4375368139223561,
						"acc_stderr,none": 0.05306464453652511,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4823293172690763,
						"acc_stderr,none": 0.01001581206646117,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624487,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39839357429718875,
						"acc_stderr,none": 0.00981295816527095,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5377510040160642,
						"acc_stderr,none": 0.009993466360872791,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4979919678714859,
						"acc_stderr,none": 0.010021992045038411,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5116465863453815,
						"acc_stderr,none": 0.010019353650807703,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41807228915662653,
						"acc_stderr,none": 0.009886618180256037,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.01001953797297508,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38032128514056224,
						"acc_stderr,none": 0.009730746464767607,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384219,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4493975903614458,
						"acc_stderr,none": 0.009970615649588139,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.009841918156163148,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4108433734939759,
						"acc_stderr,none": 0.009861456841490828,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3526104417670683,
						"acc_stderr,none": 0.009576746271768752,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6250526442452319,
						"acc_stderr,none": 0.06339913543183494,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.01259874393825287,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7809397749834547,
						"acc_stderr,none": 0.010643931294349703,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499847,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.557246856386499,
						"acc_stderr,none": 0.012782510750319245,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751374,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6598279285241562,
						"acc_stderr,none": 0.012192034998028836,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.528127068166777,
						"acc_stderr,none": 0.012846749995797699,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6750496360026472,
						"acc_stderr,none": 0.01205279844220021,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5837193911317009,
						"acc_stderr,none": 0.012685473350967527,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6366644606221046,
						"acc_stderr,none": 0.012377153306613275,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.814789840413576,
						"acc_stderr,none": 0.03434770530799896,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.006983463551504547,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7539103232533889,
						"acc_stderr,none": 0.013916300191059498,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7718631178707225,
						"acc_stderr,none": 0.025924909559244272,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.692063492063492,
						"acc_stderr,none": 0.026051860027264458,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7916666666666666,
						"acc_stderr,none": 0.018107836663152056,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-15t-32k-No-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-15t-No-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6200676437429538,
						"acc_norm,none": 0.6079481397970687,
						"acc_norm_stderr,none": 0.09240767819389203,
						"acc_stderr,none": 0.11001448571717415,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.36125,
						"acc_stderr,none": 0.014724861303290397,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8228955223880597,
						"acc_stderr,none": 0.16134680241829683,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2907960628561561,
						"acc_norm,none": 0.2907960628561561,
						"acc_norm_stderr,none": 0.05021747755135942,
						"acc_stderr,none": 0.05021747755135942,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5505300142925202,
						"acc_stderr,none": 0.01077202587506087,
						"alias": "glue",
						"f1,none": 0.6623575390121269,
						"f1_stderr,none": 0.0002733825291311283,
						"mcc,none": 0.01845565733408863,
						"mcc_stderr,none": 0.03273758941645293
					},
					"lambada": {
						"acc,none": 0.7087133708519309,
						"acc_stderr,none": 0.016850692633428886,
						"alias": "lambada",
						"perplexity,none": 3.8784881294998965,
						"perplexity_stderr,none": 0.23551879245757332
					},
					"lambada_multilingual": {
						"acc,none": 0.5369687560644285,
						"acc_stderr,none": 0.08430882353217248,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.70048891120678,
						"perplexity_stderr,none": 8.059465734784938
					},
					"mmlu": {
						"acc,none": 0.2829369035749893,
						"acc_stderr,none": 0.04449010491367579,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.27290116896918176,
						"acc_stderr,none": 0.034599615055619884,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.31412938525909234,
						"acc_stderr,none": 0.04595175653871959,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27331816704582385,
						"acc_stderr,none": 0.04044627009055694,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2765620044402157,
						"acc_stderr,none": 0.052108661789192635,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4712142857142857,
						"acc_stderr,none": 0.060879199130928825,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7244796853908684,
						"acc_norm,none": 0.6125382387809819,
						"acc_norm_stderr,none": 0.0105401711439326,
						"acc_stderr,none": 0.15163443467559284,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357501508537432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5537454331657439,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4375618453455203,
						"perplexity_stderr,none": 0.06708642097396364,
						"word_perplexity,none": 10.553448186126827,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3094048220699125,
						"acc_stderr,none": 0.0014559091603162711,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29253365973072215,
						"bleu_acc_stderr,none": 0.015925597445286165,
						"bleu_diff,none": -8.70527035582211,
						"bleu_diff_stderr,none": 0.8771346696462828,
						"bleu_max,none": 26.569192385016013,
						"bleu_max_stderr,none": 0.7964495055065042,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.555716613496823,
						"rouge1_diff_stderr,none": 0.9270602103586496,
						"rouge1_max,none": 51.34875667118895,
						"rouge1_max_stderr,none": 0.8718549018689823,
						"rouge2_acc,none": 0.23133414932680538,
						"rouge2_acc_stderr,none": 0.014761945174862665,
						"rouge2_diff,none": -13.558803565520408,
						"rouge2_diff_stderr,none": 1.1237165646231728,
						"rouge2_max,none": 35.073233459869826,
						"rouge2_max_stderr,none": 1.0279496779038944,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283356,
						"rougeL_diff,none": -11.75666886040966,
						"rougeL_diff_stderr,none": 0.9390181874731942,
						"rougeL_max,none": 48.64655737859748,
						"rougeL_max_stderr,none": 0.8948807147169482
					},
					"xcopa": {
						"acc,none": 0.6245454545454545,
						"acc_stderr,none": 0.07303174156027332,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44109772423025434,
						"acc_stderr,none": 0.054615893307441855,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6202394561097407,
						"acc_stderr,none": 0.06422614584228693,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.036979114244393055,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6200676437429538,
						"acc_norm,none": 0.6079481397970687,
						"acc_norm_stderr,none": 0.09240767819389203,
						"acc_stderr,none": 0.11001448571717415,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.36125,
						"acc_stderr,none": 0.014724861303290397,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.015186527932040127,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.015195720118175115,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3625,
						"acc_stderr,none": 0.013883037874225516,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3873720136518771,
						"acc_norm,none": 0.4129692832764505,
						"acc_norm_stderr,none": 0.014388344935398326,
						"acc_stderr,none": 0.014235872487909876,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7348484848484849,
						"acc_norm,none": 0.7041245791245792,
						"acc_norm_stderr,none": 0.009365854134140063,
						"acc_stderr,none": 0.009057621139172614,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8228955223880597,
						"acc_stderr,none": 0.16134680241829683,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783217,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676774,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705577929,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340995,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888905,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.015654426245029274,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655147,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416053,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607817,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.0062736240211187545,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380712,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734947,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248792,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323494,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611483,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024949,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.014142984975740663,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.01339490288966001,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697048,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.00371723254825659,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.271,
						"acc_stderr,none": 0.014062601350986186,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265153,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617331,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595296,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.00791034598317755,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785134,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523713,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.01534116525402664,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942324,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.015689173023144064,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493375,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.015672320237336213,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042965,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345682,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340992,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.01339490288966001,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792942,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346939,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996672,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541684,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.015814743314581818,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697053,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696834,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426513,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.01386041525752791,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.457,
						"acc_stderr,none": 0.01576069159013638,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592065,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797589,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381786,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074125,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378223,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706827,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106065,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685757004,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.412,
						"acc_stderr,none": 0.015572363292015093,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.362,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2907960628561561,
						"acc_norm,none": 0.2907960628561561,
						"acc_norm_stderr,none": 0.05021747755135942,
						"acc_stderr,none": 0.05021747755135942,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323176,
						"acc_stderr,none": 0.03307162750323176,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.037698374558241474,
						"acc_stderr,none": 0.037698374558241474,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3349282296650718,
						"acc_norm,none": 0.3349282296650718,
						"acc_norm_stderr,none": 0.03272491043051243,
						"acc_stderr,none": 0.03272491043051243,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847837,
						"acc_stderr,none": 0.03915345408847837,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288875,
						"acc_stderr,none": 0.03976333292288875,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30340557275541796,
						"acc_norm,none": 0.30340557275541796,
						"acc_norm_stderr,none": 0.02561968812851561,
						"acc_stderr,none": 0.02561968812851561,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.031980016601150726,
						"acc_stderr,none": 0.031980016601150726,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.02765215314415926,
						"acc_stderr,none": 0.02765215314415926,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16981132075471697,
						"acc_norm,none": 0.16981132075471697,
						"acc_norm_stderr,none": 0.036641823111517896,
						"acc_stderr,none": 0.036641823111517896,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.047304390228528934,
						"acc_stderr,none": 0.047304390228528934,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055041,
						"acc_stderr,none": 0.04232473532055041,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.29304029304029305,
						"acc_norm,none": 0.29304029304029305,
						"acc_norm_stderr,none": 0.027597932553584063,
						"acc_stderr,none": 0.027597932553584063,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.032566854844603886,
						"acc_stderr,none": 0.032566854844603886,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23391812865497075,
						"acc_norm,none": 0.23391812865497075,
						"acc_norm_stderr,none": 0.03246721765117826,
						"acc_stderr,none": 0.03246721765117826,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675178,
						"acc_stderr,none": 0.03711513959675178,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735707,
						"acc_stderr,none": 0.03881956126735707,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3006134969325153,
						"acc_norm,none": 0.3006134969325153,
						"acc_norm_stderr,none": 0.03602511318806771,
						"acc_stderr,none": 0.03602511318806771,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250621,
						"acc_stderr,none": 0.03451628876250621,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.38235294117647056,
						"acc_norm,none": 0.38235294117647056,
						"acc_norm_stderr,none": 0.031566630992154156,
						"acc_stderr,none": 0.031566630992154156,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2565217391304348,
						"acc_norm,none": 0.2565217391304348,
						"acc_norm_stderr,none": 0.028858814315305646,
						"acc_stderr,none": 0.028858814315305646,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34965034965034963,
						"acc_norm,none": 0.34965034965034963,
						"acc_norm_stderr,none": 0.040017160283823947,
						"acc_stderr,none": 0.040017160283823947,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.03749763364527049,
						"acc_stderr,none": 0.03749763364527049,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.041100705493392085,
						"acc_stderr,none": 0.041100705493392085,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.034450002891734596,
						"acc_stderr,none": 0.034450002891734596,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.037950002128017815,
						"acc_stderr,none": 0.037950002128017815,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.03852273364924316,
						"acc_stderr,none": 0.03852273364924316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.0336955369187772,
						"acc_stderr,none": 0.0336955369187772,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3313953488372093,
						"acc_norm,none": 0.3313953488372093,
						"acc_norm_stderr,none": 0.03599646438179593,
						"acc_stderr,none": 0.03599646438179593,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2944038929440389,
						"acc_norm,none": 0.2944038929440389,
						"acc_norm_stderr,none": 0.022509089804193687,
						"acc_stderr,none": 0.022509089804193687,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.032790101746569884,
						"acc_stderr,none": 0.032790101746569884,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.03184006730473941,
						"acc_stderr,none": 0.03184006730473941,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.29444444444444445,
						"acc_norm,none": 0.29444444444444445,
						"acc_norm_stderr,none": 0.034067540013496884,
						"acc_stderr,none": 0.034067540013496884,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2804232804232804,
						"acc_norm,none": 0.2804232804232804,
						"acc_norm_stderr,none": 0.03276171742795849,
						"acc_stderr,none": 0.03276171742795849,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.041678081808441535,
						"acc_stderr,none": 0.041678081808441535,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.0451367671816831,
						"acc_stderr,none": 0.0451367671816831,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752743,
						"acc_stderr,none": 0.03424737867752743,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426438,
						"acc_stderr,none": 0.022743327388426438,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03087984562096084,
						"acc_stderr,none": 0.03087984562096084,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.0365392361546597,
						"acc_stderr,none": 0.0365392361546597,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.03369553691877718,
						"acc_stderr,none": 0.03369553691877718,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.034714607440589844,
						"acc_stderr,none": 0.034714607440589844,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.0348937065201876,
						"acc_stderr,none": 0.0348937065201876,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.01845565733408863,
						"mcc_stderr,none": 0.03273758941645293
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5505300142925202,
						"acc_stderr,none": 0.01077202587506087,
						"alias": "glue",
						"f1,none": 0.6623575390121269,
						"f1_stderr,none": 0.0002733825291311283,
						"mcc,none": 0.01845565733408863,
						"mcc_stderr,none": 0.03273758941645293
					},
					"hellaswag": {
						"acc,none": 0.5258912567217686,
						"acc_norm,none": 0.7099183429595698,
						"acc_norm_stderr,none": 0.00452872395187824,
						"acc_stderr,none": 0.004983087049281747,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7087133708519309,
						"acc_stderr,none": 0.016850692633428886,
						"alias": "lambada",
						"perplexity,none": 3.8784881294998965,
						"perplexity_stderr,none": 0.23551879245757332
					},
					"lambada_multilingual": {
						"acc,none": 0.5369687560644285,
						"acc_stderr,none": 0.08430882353217248,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.70048891120678,
						"perplexity_stderr,none": 8.059465734784938
					},
					"lambada_openai": {
						"acc,none": 0.7417038618280614,
						"acc_stderr,none": 0.00609798426592076,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4375618453455203,
						"perplexity_stderr,none": 0.06708642097396364
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42286046962934215,
						"acc_stderr,none": 0.0068825761876977875,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.04622392997292,
						"perplexity_stderr,none": 1.8391447472790967
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7403454298466913,
						"acc_stderr,none": 0.006108397042730499,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4357348930487683,
						"perplexity_stderr,none": 0.06697837838975183
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45526877547059963,
						"acc_stderr,none": 0.006938045450999903,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.25835836046741,
						"perplexity_stderr,none": 1.3682644328702065
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5513293227246264,
						"acc_stderr,none": 0.006929173919665485,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.19056770863486,
						"perplexity_stderr,none": 0.7674221220205246
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.515039782650883,
						"acc_stderr,none": 0.006962825604553246,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.571559663909937,
						"perplexity_stderr,none": 1.1208902713252802
					},
					"lambada_standard": {
						"acc,none": 0.6774694352804191,
						"acc_stderr,none": 0.0065124194470117,
						"alias": " - lambada_standard",
						"perplexity,none": 4.321136452998951,
						"perplexity_stderr,none": 0.09207106981169363
					},
					"logiqa": {
						"acc,none": 0.22887864823348694,
						"acc_norm,none": 0.2872503840245776,
						"acc_norm_stderr,none": 0.017747701948846593,
						"acc_stderr,none": 0.016478107276313263,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2829369035749893,
						"acc_stderr,none": 0.04449010491367579,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.040943762699967926,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.29605263157894735,
						"acc_stderr,none": 0.03715062154998905,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3320754716981132,
						"acc_stderr,none": 0.028985455652334388,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2916666666666667,
						"acc_stderr,none": 0.038009680605548594,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036846,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.21965317919075145,
						"acc_stderr,none": 0.031568093627031744,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.18627450980392157,
						"acc_stderr,none": 0.03873958714149352,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33191489361702126,
						"acc_stderr,none": 0.030783736757745643,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281338,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.03695183311650232,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.022569897074918428,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.34838709677419355,
						"acc_stderr,none": 0.02710482632810094,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3212121212121212,
						"acc_stderr,none": 0.03646204963253812,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.29292929292929293,
						"acc_stderr,none": 0.032424979581788166,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.34196891191709844,
						"acc_stderr,none": 0.03423465100104282,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23846153846153847,
						"acc_stderr,none": 0.02160629449464773,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.27037037037037037,
						"acc_stderr,none": 0.02708037281514566,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2689075630252101,
						"acc_stderr,none": 0.028801392193631273,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.03631329803969653,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27706422018348625,
						"acc_stderr,none": 0.019188482590169538,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.026491914727355168,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.03198001660115071,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.28270042194092826,
						"acc_stderr,none": 0.029312814153955934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3811659192825112,
						"acc_stderr,none": 0.03259625118416828,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.29770992366412213,
						"acc_stderr,none": 0.040103589424622034,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.27290116896918176,
						"acc_stderr,none": 0.034599615055619884,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3140495867768595,
						"acc_stderr,none": 0.042369647530410184,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26993865030674846,
						"acc_stderr,none": 0.034878251684978906,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285713,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3247863247863248,
						"acc_stderr,none": 0.03067902276549883,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3652618135376756,
						"acc_stderr,none": 0.01721853002883865,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.27167630057803466,
						"acc_stderr,none": 0.023948512905468365,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23910614525139665,
						"acc_stderr,none": 0.014265554192331154,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.026090162504279046,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.31412938525909234,
						"acc_stderr,none": 0.04595175653871959,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.33762057877813506,
						"acc_stderr,none": 0.02685882587948854,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2839506172839506,
						"acc_stderr,none": 0.025089478523765134,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2516297262059974,
						"acc_stderr,none": 0.011083276280441909,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.2426470588235294,
						"acc_stderr,none": 0.02604066247420126,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.018152871051538802,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.04607582090719976,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.025607375986579164,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27331816704582385,
						"acc_stderr,none": 0.04044627009055694,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2935323383084577,
						"acc_stderr,none": 0.03220024104534205,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2765620044402157,
						"acc_stderr,none": 0.052108661789192635,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683227,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.036155076303109365,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.36994396332144674,
						"acc_stderr,none": 0.004873427775227703,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37164361269324653,
						"acc_stderr,none": 0.004873797777343952,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8386075949367089,
						"f1_stderr,none": 0.015771203789152668
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.034903047091412745,
						"exact_match_stderr,remove_whitespace": 0.0030550878483141374
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.022049497969827865,
						"acc_stderr,none": 0.02059164957122493,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.403,
						"acc_stderr,none": 0.010970673536247517,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.01079853249993165,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.392,
						"acc_stderr,none": 0.01091913979244253,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.011141704034140802,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5185,
						"acc_stderr,none": 0.011175478542788577,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4712142857142857,
						"acc_stderr,none": 0.060879199130928825,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7633297062023939,
						"acc_norm,none": 0.7758433079434167,
						"acc_norm_stderr,none": 0.009729897956410048,
						"acc_stderr,none": 0.009916841655042809,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7244796853908684,
						"acc_norm,none": 0.6125382387809819,
						"acc_norm_stderr,none": 0.0105401711439326,
						"acc_stderr,none": 0.15163443467559284,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357501508537432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5537454331657439,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4375618453455203,
						"perplexity_stderr,none": 0.06708642097396364,
						"word_perplexity,none": 10.553448186126827,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49917627677100496,
						"acc_stderr,none": 0.006765401370838248,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6348503586445708,
						"acc_stderr,none": 0.0023945539385203446,
						"alias": " - qqp",
						"f1,none": 0.6608312082155903,
						"f1_stderr,none": 0.002624772706029809
					},
					"record": {
						"alias": "record",
						"em,none": 0.2803,
						"em_stderr,none": 0.004491682148244163,
						"f1,none": 0.2906619050204754,
						"f1_stderr,none": 0.004500176107014154
					},
					"rte": {
						"acc,none": 0.6137184115523465,
						"acc_stderr,none": 0.029307720385270505,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.954,
						"acc_norm,none": 0.919,
						"acc_norm_stderr,none": 0.008632121032139946,
						"acc_stderr,none": 0.006627814717380707,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9013761467889908,
						"acc_stderr,none": 0.010102641365451156,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3094048220699125,
						"acc_stderr,none": 0.0014559091603162711,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29253365973072215,
						"bleu_acc_stderr,none": 0.015925597445286165,
						"bleu_diff,none": -8.70527035582211,
						"bleu_diff_stderr,none": 0.8771346696462828,
						"bleu_max,none": 26.569192385016013,
						"bleu_max_stderr,none": 0.7964495055065042,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.555716613496823,
						"rouge1_diff_stderr,none": 0.9270602103586496,
						"rouge1_max,none": 51.34875667118895,
						"rouge1_max_stderr,none": 0.8718549018689823,
						"rouge2_acc,none": 0.23133414932680538,
						"rouge2_acc_stderr,none": 0.014761945174862665,
						"rouge2_diff,none": -13.558803565520408,
						"rouge2_diff_stderr,none": 1.1237165646231728,
						"rouge2_max,none": 35.073233459869826,
						"rouge2_max_stderr,none": 1.0279496779038944,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283356,
						"rougeL_diff,none": -11.75666886040966,
						"rougeL_diff_stderr,none": 0.9390181874731942,
						"rougeL_max,none": 48.64655737859748,
						"rougeL_max_stderr,none": 0.8948807147169482
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.29253365973072215,
						"bleu_acc_stderr,none": 0.015925597445286165,
						"bleu_diff,none": -8.70527035582211,
						"bleu_diff_stderr,none": 0.8771346696462828,
						"bleu_max,none": 26.569192385016013,
						"bleu_max_stderr,none": 0.7964495055065042,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.555716613496823,
						"rouge1_diff_stderr,none": 0.9270602103586496,
						"rouge1_max,none": 51.34875667118895,
						"rouge1_max_stderr,none": 0.8718549018689823,
						"rouge2_acc,none": 0.23133414932680538,
						"rouge2_acc_stderr,none": 0.014761945174862665,
						"rouge2_diff,none": -13.558803565520408,
						"rouge2_diff_stderr,none": 1.1237165646231728,
						"rouge2_max,none": 35.073233459869826,
						"rouge2_max_stderr,none": 1.0279496779038944,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283356,
						"rougeL_diff,none": -11.75666886040966,
						"rougeL_diff_stderr,none": 0.9390181874731942,
						"rougeL_max,none": 48.64655737859748,
						"rougeL_max_stderr,none": 0.8948807147169482
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2386780905752754,
						"acc_stderr,none": 0.014922629695456416,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38013155356454964,
						"acc_stderr,none": 0.013664452250003548,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6357501508537432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5537454331657439,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.553448186126827,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.675611681136543,
						"acc_stderr,none": 0.013157225726641639,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899505,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5288461538461539,
						"acc_stderr,none": 0.04918440626354964,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6245454545454545,
						"acc_stderr,none": 0.07303174156027332,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689257,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.020011219298073535,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.019384310743640384,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.02047511809298897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530085,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44109772423025434,
						"acc_stderr,none": 0.054615893307441855,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4963855421686747,
						"acc_stderr,none": 0.010021811000966357,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.009811284026425582,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5421686746987951,
						"acc_stderr,none": 0.00998636681919649,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5116465863453815,
						"acc_stderr,none": 0.010019353650807701,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5132530120481927,
						"acc_stderr,none": 0.010018551648218466,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505942,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3895582329317269,
						"acc_stderr,none": 0.009774529590783664,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41325301204819276,
						"acc_stderr,none": 0.009870087435623778,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894265,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40321285140562246,
						"acc_stderr,none": 0.009832511560868075,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4072289156626506,
						"acc_stderr,none": 0.009848052628967676,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3570281124497992,
						"acc_stderr,none": 0.009603615216109774,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6202394561097407,
						"acc_stderr,none": 0.06422614584228693,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.012661578894368947,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7028457974851092,
						"acc_stderr,none": 0.011760681560041933,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570942,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6485771012574454,
						"acc_stderr,none": 0.01228591087173833,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5181998676373263,
						"acc_stderr,none": 0.012858598401831848,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6750496360026472,
						"acc_stderr,none": 0.012052798442200212,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481956,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.01267071629096672,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399376,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.036979114244393055,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8701075268817204,
						"acc_stderr,none": 0.006973653965627702,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7361835245046924,
						"acc_stderr,none": 0.01423840196636841,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7604562737642585,
						"acc_stderr,none": 0.02636810251019086,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.025976599352305375,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187166,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-15t-No-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-15t-With-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6133032694475761,
						"acc_norm,none": 0.6003382187147689,
						"acc_norm_stderr,none": 0.09164685559700163,
						"acc_stderr,none": 0.10924764755670051,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.0148940422978907,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8207910447761194,
						"acc_stderr,none": 0.1621716666525245,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2956311517872561,
						"acc_norm,none": 0.2956311517872561,
						"acc_norm_stderr,none": 0.05272171405606332,
						"acc_stderr,none": 0.05272171405606332,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5243717246307763,
						"acc_stderr,none": 0.00958167411881355,
						"alias": "glue",
						"f1,none": 0.6448678943746179,
						"f1_stderr,none": 0.00032846112320663933,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012900535086889195
					},
					"lambada": {
						"acc,none": 0.7079371240054337,
						"acc_stderr,none": 0.014896707966353317,
						"alias": "lambada",
						"perplexity,none": 3.925388676517947,
						"perplexity_stderr,none": 0.2454148616659558
					},
					"lambada_multilingual": {
						"acc,none": 0.5348340772365613,
						"acc_stderr,none": 0.08298182190389869,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.057643358458623,
						"perplexity_stderr,none": 8.224885129200231
					},
					"mmlu": {
						"acc,none": 0.28642643498077197,
						"acc_stderr,none": 0.044956417386786886,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.27948990435706694,
						"acc_stderr,none": 0.03856536908758936,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.31702607016414547,
						"acc_stderr,none": 0.044086852238323024,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27916802079948,
						"acc_stderr,none": 0.04129296189870555,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2737075800824611,
						"acc_stderr,none": 0.05067915603581962,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47514285714285714,
						"acc_stderr,none": 0.05673739079035622,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.723158584370121,
						"acc_norm,none": 0.6049791602075762,
						"acc_norm_stderr,none": 0.010429683787206016,
						"acc_stderr,none": 0.15212026148018817,
						"alias": "pythia",
						"bits_per_byte,none": 0.6356746918240135,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553664167857842,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4641518277296095,
						"perplexity_stderr,none": 0.06796910547530312,
						"word_perplexity,none": 10.550496860622095,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3128397125459066,
						"acc_stderr,none": 0.0015366386124427795,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236614,
						"bleu_diff,none": -8.826008357623458,
						"bleu_diff_stderr,none": 0.873430964636525,
						"bleu_max,none": 26.335714023537488,
						"bleu_max_stderr,none": 0.7925583368350522,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253588,
						"rouge1_diff,none": -11.377962611661255,
						"rouge1_diff_stderr,none": 0.9173477769796888,
						"rouge1_max,none": 51.4493560554275,
						"rouge1_max_stderr,none": 0.868164675064977,
						"rouge2_acc,none": 0.23011015911872704,
						"rouge2_acc_stderr,none": 0.01473455795980776,
						"rouge2_diff,none": -13.510978818653996,
						"rouge2_diff_stderr,none": 1.1134588300520247,
						"rouge2_max,none": 35.16995422667944,
						"rouge2_max_stderr,none": 1.020391165323557,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283354,
						"rougeL_diff,none": -11.797783849763169,
						"rougeL_diff_stderr,none": 0.9288797098832609,
						"rougeL_max,none": 48.539242198993996,
						"rougeL_max_stderr,none": 0.8874440491570758
					},
					"xcopa": {
						"acc,none": 0.6265454545454545,
						"acc_stderr,none": 0.07168157957200158,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4417135207496653,
						"acc_stderr,none": 0.05479332333154572,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6227663798808737,
						"acc_stderr,none": 0.06405118121167949,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8170375365250618,
						"acc_stderr,none": 0.035523195067269364,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6133032694475761,
						"acc_norm,none": 0.6003382187147689,
						"acc_norm_stderr,none": 0.09164685559700163,
						"acc_stderr,none": 0.10924764755670051,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.0148940422978907,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.015222868840522022,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.01518652793204012,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.0138192490040473,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3822525597269625,
						"acc_norm,none": 0.4069965870307167,
						"acc_norm_stderr,none": 0.014356399418009131,
						"acc_stderr,none": 0.014200454049979298,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7272727272727273,
						"acc_norm,none": 0.6957070707070707,
						"acc_norm_stderr,none": 0.009441202922359183,
						"acc_stderr,none": 0.009138630726364231,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8207910447761194,
						"acc_stderr,none": 0.1621716666525245,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478321,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998353,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973425,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024964,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937596,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.0156850572527172,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024394,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397334,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030093,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.00655881224140611,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140919,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333443,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704163,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910642,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724427,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.01438551156347735,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774166,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920828,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929324,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796553,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.262,
						"acc_stderr,none": 0.01391220865102135,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592091,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.01267910721461733,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804487,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.0076870078762864245,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244055,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491113,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897068,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248118,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.441,
						"acc_stderr,none": 0.015708779894242676,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.015685057252717204,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696867,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524312,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.00961683333969579,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085341,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919281,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346939,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745899,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477823,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.487,
						"acc_stderr,none": 0.015813952101896626,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639224,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.00989939381972442,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767624,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462615,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427425,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784364,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796563,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796574,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179475,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496501,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639239,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315141,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910606,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498325,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932579,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2956311517872561,
						"acc_norm,none": 0.2956311517872561,
						"acc_norm_stderr,none": 0.05272171405606332,
						"acc_stderr,none": 0.05272171405606332,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331529,
						"acc_stderr,none": 0.03360300796331529,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.04071874442606894,
						"acc_stderr,none": 0.04071874442606894,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.02590393636568494,
						"acc_stderr,none": 0.02590393636568494,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328525,
						"acc_stderr,none": 0.03383195081328525,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.029312814153955924,
						"acc_stderr,none": 0.029312814153955924,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1509433962264151,
						"acc_norm,none": 0.1509433962264151,
						"acc_norm_stderr,none": 0.0349366075385868,
						"acc_stderr,none": 0.0349366075385868,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123842,
						"acc_stderr,none": 0.04752784159123842,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055041,
						"acc_stderr,none": 0.04232473532055041,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.027391606744548987,
						"acc_stderr,none": 0.027391606744548987,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.0327028718148208,
						"acc_stderr,none": 0.0327028718148208,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.032744852119469564,
						"acc_stderr,none": 0.032744852119469564,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617515,
						"acc_stderr,none": 0.03814280082617515,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.03959440284735793,
						"acc_stderr,none": 0.03959440284735793,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.31446540880503143,
						"acc_norm,none": 0.31446540880503143,
						"acc_norm_stderr,none": 0.03693793250042286,
						"acc_stderr,none": 0.03693793250042286,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2883435582822086,
						"acc_norm,none": 0.2883435582822086,
						"acc_norm_stderr,none": 0.035590395316173425,
						"acc_stderr,none": 0.035590395316173425,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.03472469304477597,
						"acc_stderr,none": 0.03472469304477597,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.028746730632681374,
						"acc_stderr,none": 0.028746730632681374,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.03154449888270286,
						"acc_stderr,none": 0.03154449888270286,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3949579831932773,
						"acc_norm,none": 0.3949579831932773,
						"acc_norm_stderr,none": 0.03175367846096625,
						"acc_stderr,none": 0.03175367846096625,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627455,
						"acc_stderr,none": 0.03366618544627455,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2796610169491525,
						"acc_norm,none": 0.2796610169491525,
						"acc_norm_stderr,none": 0.04149459161011112,
						"acc_stderr,none": 0.04149459161011112,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814535,
						"acc_stderr,none": 0.03470398212814535,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.03852273364924316,
						"acc_stderr,none": 0.03852273364924316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871636,
						"acc_stderr,none": 0.03615263198871636,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.30900243309002434,
						"acc_norm,none": 0.30900243309002434,
						"acc_norm_stderr,none": 0.022820611641536453,
						"acc_stderr,none": 0.022820611641536453,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3691588785046729,
						"acc_norm,none": 0.3691588785046729,
						"acc_norm_stderr,none": 0.03306563404172723,
						"acc_stderr,none": 0.03306563404172723,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.04011374393621146,
						"acc_stderr,none": 0.04011374393621146,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3524590163934426,
						"acc_norm,none": 0.3524590163934426,
						"acc_norm_stderr,none": 0.0434305428342706,
						"acc_stderr,none": 0.0434305428342706,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.03211151353994381,
						"acc_stderr,none": 0.03211151353994381,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03330267393083602,
						"acc_stderr,none": 0.03330267393083602,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.0428025479250546,
						"acc_stderr,none": 0.0428025479250546,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438014,
						"acc_stderr,none": 0.03780019230438014,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.3028571428571429,
						"acc_norm,none": 0.3028571428571429,
						"acc_norm_stderr,none": 0.03483414676585986,
						"acc_stderr,none": 0.03483414676585986,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.030982555535700885,
						"acc_stderr,none": 0.030982555535700885,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03568272877241248,
						"acc_stderr,none": 0.03568272877241248,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066653,
						"acc_stderr,none": 0.03785714465066653,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493033,
						"acc_stderr,none": 0.030945344741493033,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3081081081081081,
						"acc_norm,none": 0.3081081081081081,
						"acc_norm_stderr,none": 0.034037822778343836,
						"acc_stderr,none": 0.034037822778343836,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.035795265164562245,
						"acc_stderr,none": 0.035795265164562245,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278585,
						"acc_stderr,none": 0.03637652289278585,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012900535086889195
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5243717246307763,
						"acc_stderr,none": 0.00958167411881355,
						"alias": "glue",
						"f1,none": 0.6448678943746179,
						"f1_stderr,none": 0.00032846112320663933,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012900535086889195
					},
					"hellaswag": {
						"acc,none": 0.5265883290181239,
						"acc_norm,none": 0.7101175064728141,
						"acc_norm_stderr,none": 0.004527804016253783,
						"acc_stderr,none": 0.00498272147240734,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7079371240054337,
						"acc_stderr,none": 0.014896707966353317,
						"alias": "lambada",
						"perplexity,none": 3.925388676517947,
						"perplexity_stderr,none": 0.2454148616659558
					},
					"lambada_multilingual": {
						"acc,none": 0.5348340772365613,
						"acc_stderr,none": 0.08298182190389869,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.057643358458623,
						"perplexity_stderr,none": 8.224885129200231
					},
					"lambada_openai": {
						"acc,none": 0.736852319037454,
						"acc_stderr,none": 0.006134823516779081,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4641518277296095,
						"perplexity_stderr,none": 0.06796910547530312
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42227828449446925,
						"acc_stderr,none": 0.00688130477337688,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.65932266014551,
						"perplexity_stderr,none": 1.8746810780939713
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7349117019212109,
						"acc_stderr,none": 0.0061492894021581606,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.463056984285738,
						"perplexity_stderr,none": 0.067904108387925
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45429846691247816,
						"acc_stderr,none": 0.0069368179239633,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.882021004777148,
						"perplexity_stderr,none": 1.3975385358255945
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.00693428315721904,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.445780897552503,
						"perplexity_stderr,none": 0.7811412063562879
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.515039782650883,
						"acc_stderr,none": 0.006962825604553246,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.838035245532215,
						"perplexity_stderr,none": 1.138071552928094
					},
					"lambada_standard": {
						"acc,none": 0.6809625460896566,
						"acc_stderr,none": 0.006493734392265256,
						"alias": " - lambada_standard",
						"perplexity,none": 4.387902403977054,
						"perplexity_stderr,none": 0.09422109983611744
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.28110599078341014,
						"acc_norm_stderr,none": 0.01763237462646,
						"acc_stderr,none": 0.016555252497925898,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.28642643498077197,
						"acc_stderr,none": 0.044956417386786886,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.0399926287661772,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3026315789473684,
						"acc_stderr,none": 0.03738520676119667,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3132075471698113,
						"acc_stderr,none": 0.02854479331905533,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.04093601807403326,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23699421965317918,
						"acc_stderr,none": 0.03242414757483098,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33191489361702126,
						"acc_stderr,none": 0.030783736757745647,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481404,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.03695183311650232,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.022644212615525218,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04040610178208841,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3419354838709677,
						"acc_stderr,none": 0.026985289576552732,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.03108982600293752,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3696969696969697,
						"acc_stderr,none": 0.03769430314512568,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.31313131313131315,
						"acc_stderr,none": 0.033042050878136525,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.33678756476683935,
						"acc_stderr,none": 0.03410780251836184,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24102564102564103,
						"acc_stderr,none": 0.021685546665333198,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.025928876132766097,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.028657491285071966,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.28440366972477066,
						"acc_stderr,none": 0.01934203658770258,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19444444444444445,
						"acc_stderr,none": 0.026991454502036716,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.032282103870378914,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3080168776371308,
						"acc_stderr,none": 0.0300523893356057,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3542600896860987,
						"acc_stderr,none": 0.032100621541349864,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.039153454088478354,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.27948990435706694,
						"acc_stderr,none": 0.03856536908758936,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.04294340845212093,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.045245960070300496,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.035590395316173425,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3418803418803419,
						"acc_stderr,none": 0.03107502852650775,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542126,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.38058748403575987,
						"acc_stderr,none": 0.017362564126075425,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.27167630057803466,
						"acc_stderr,none": 0.023948512905468358,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303673,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.28104575163398693,
						"acc_stderr,none": 0.025738854797818723,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.31702607016414547,
						"acc_stderr,none": 0.044086852238323024,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.33762057877813506,
						"acc_stderr,none": 0.026858825879488544,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2993827160493827,
						"acc_stderr,none": 0.025483115601195466,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902002,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25945241199478486,
						"acc_stderr,none": 0.011195262076350318,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.2536764705882353,
						"acc_stderr,none": 0.026431329870789538,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.28104575163398693,
						"acc_stderr,none": 0.018185218954318082,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20816326530612245,
						"acc_stderr,none": 0.025991117672813296,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27916802079948,
						"acc_stderr,none": 0.04129296189870555,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.31840796019900497,
						"acc_stderr,none": 0.03294118479054095,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2737075800824611,
						"acc_stderr,none": 0.05067915603581962,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03615507630310936,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3515028018339277,
						"acc_stderr,none": 0.004819427159655223,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35231895850284783,
						"acc_stderr,none": 0.004817810913897394,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8386075949367089,
						"f1_stderr,none": 0.015782575179639127
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.027977839335180055,
						"exact_match_stderr,remove_whitespace": 0.002745059283264663
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020629569998345393,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4175,
						"acc_stderr,none": 0.011029855114729358,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3785,
						"acc_stderr,none": 0.010847935926107406,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.010957190790298965,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884874,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5135,
						"acc_stderr,none": 0.011179059024816817,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47514285714285714,
						"acc_stderr,none": 0.05673739079035622,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7578890097932536,
						"acc_norm,none": 0.7671381936887922,
						"acc_norm_stderr,none": 0.009861236071080751,
						"acc_stderr,none": 0.009994371269104376,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.723158584370121,
						"acc_norm,none": 0.6049791602075762,
						"acc_norm_stderr,none": 0.010429683787206016,
						"acc_stderr,none": 0.15212026148018817,
						"alias": "pythia",
						"bits_per_byte,none": 0.6356746918240135,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553664167857842,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4641518277296095,
						"perplexity_stderr,none": 0.06796910547530312,
						"word_perplexity,none": 10.550496860622095,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4993593263774483,
						"acc_stderr,none": 0.006765404997877069,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6005194162750432,
						"acc_stderr,none": 0.002435930342578943,
						"alias": " - qqp",
						"f1,none": 0.6431901027283773,
						"f1_stderr,none": 0.0026235298359600923
					},
					"record": {
						"alias": "record",
						"em,none": 0.2779,
						"em_stderr,none": 0.004479862265359736,
						"f1,none": 0.28789523834586145,
						"f1_stderr,none": 0.004488748647455379
					},
					"rte": {
						"acc,none": 0.6353790613718412,
						"acc_stderr,none": 0.028972282465132403,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.948,
						"acc_norm,none": 0.917,
						"acc_norm_stderr,none": 0.00872852720607479,
						"acc_stderr,none": 0.007024624213817139,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9002293577981652,
						"acc_stderr,none": 0.010154741963033091,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3128397125459066,
						"acc_stderr,none": 0.0015366386124427795,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236614,
						"bleu_diff,none": -8.826008357623458,
						"bleu_diff_stderr,none": 0.873430964636525,
						"bleu_max,none": 26.335714023537488,
						"bleu_max_stderr,none": 0.7925583368350522,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253588,
						"rouge1_diff,none": -11.377962611661255,
						"rouge1_diff_stderr,none": 0.9173477769796888,
						"rouge1_max,none": 51.4493560554275,
						"rouge1_max_stderr,none": 0.868164675064977,
						"rouge2_acc,none": 0.23011015911872704,
						"rouge2_acc_stderr,none": 0.01473455795980776,
						"rouge2_diff,none": -13.510978818653996,
						"rouge2_diff_stderr,none": 1.1134588300520247,
						"rouge2_max,none": 35.16995422667944,
						"rouge2_max_stderr,none": 1.020391165323557,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283354,
						"rougeL_diff,none": -11.797783849763169,
						"rougeL_diff_stderr,none": 0.9288797098832609,
						"rougeL_max,none": 48.539242198993996,
						"rougeL_max_stderr,none": 0.8874440491570758
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236614,
						"bleu_diff,none": -8.826008357623458,
						"bleu_diff_stderr,none": 0.873430964636525,
						"bleu_max,none": 26.335714023537488,
						"bleu_max_stderr,none": 0.7925583368350522,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253588,
						"rouge1_diff,none": -11.377962611661255,
						"rouge1_diff_stderr,none": 0.9173477769796888,
						"rouge1_max,none": 51.4493560554275,
						"rouge1_max_stderr,none": 0.868164675064977,
						"rouge2_acc,none": 0.23011015911872704,
						"rouge2_acc_stderr,none": 0.01473455795980776,
						"rouge2_diff,none": -13.510978818653996,
						"rouge2_diff_stderr,none": 1.1134588300520247,
						"rouge2_max,none": 35.16995422667944,
						"rouge2_max_stderr,none": 1.020391165323557,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283354,
						"rougeL_diff,none": -11.797783849763169,
						"rougeL_diff_stderr,none": 0.9288797098832609,
						"rougeL_max,none": 48.539242198993996,
						"rougeL_max_stderr,none": 0.8874440491570758
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23990208078335373,
						"acc_stderr,none": 0.01494881267906214,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3857773443084594,
						"acc_stderr,none": 0.013728967727472745,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6356746918240135,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553664167857842,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.550496860622095,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6708760852407262,
						"acc_stderr,none": 0.01320638708909146,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5576923076923077,
						"acc_stderr,none": 0.04893740777700999,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6265454545454545,
						"acc_stderr,none": 0.07168157957200158,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.021834685869369208,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070904,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.01927981905635255,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843424,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127676,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.02051442622562804,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177514,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4417135207496653,
						"acc_stderr,none": 0.05479332333154572,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.010020508033762626,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.5028112449799197,
						"acc_stderr,none": 0.010021914455122176,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40481927710843374,
						"acc_stderr,none": 0.009838809968433934,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5445783132530121,
						"acc_stderr,none": 0.009982161147576338,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5084337349397591,
						"acc_stderr,none": 0.010020647068114183,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5108433734939759,
						"acc_stderr,none": 0.010019715824483482,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432361,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624485,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3887550200803213,
						"acc_stderr,none": 0.00977086942344149,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41526104417670684,
						"acc_stderr,none": 0.009877093420328584,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44819277108433736,
						"acc_stderr,none": 0.009968129426909876,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40522088353413654,
						"acc_stderr,none": 0.009840367477589276,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4108433734939759,
						"acc_stderr,none": 0.00986145684149083,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3526104417670683,
						"acc_stderr,none": 0.009576746271768752,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6227663798808737,
						"acc_stderr,none": 0.06405118121167949,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503925,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959689,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7101257445400397,
						"acc_stderr,none": 0.011675728247959368,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084812,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503925,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6538716082064858,
						"acc_stderr,none": 0.012242676637496357,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5215089344804765,
						"acc_stderr,none": 0.012855214257296611,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6776968894771674,
						"acc_stderr,none": 0.01202711658810771,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828164,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6333553937789543,
						"acc_stderr,none": 0.012401034429990696,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8170375365250618,
						"acc_stderr,none": 0.035523195067269364,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.873978494623656,
						"acc_stderr,none": 0.006884218449880485,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7497393117831074,
						"acc_stderr,none": 0.013994864706473825,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7908745247148289,
						"acc_stderr,none": 0.025125031682933383,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.692063492063492,
						"acc_stderr,none": 0.026051860027264458,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7896825396825397,
						"acc_stderr,none": 0.018171046497690278,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-15t-With-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-15t-extd-e2_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6175310033821871,
						"acc_norm,none": 0.616685456595265,
						"acc_norm_stderr,none": 0.09289779725397056,
						"acc_stderr,none": 0.11042489578410247,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.014749272239807062,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8248059701492537,
						"acc_stderr,none": 0.16041833086955387,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2951994474184078,
						"acc_norm,none": 0.2951994474184078,
						"acc_norm_stderr,none": 0.051494108725818784,
						"acc_stderr,none": 0.051494108725818784,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5792341591233922,
						"acc_stderr,none": 0.009549615252674835,
						"alias": "glue",
						"f1,none": 0.6760970082860837,
						"f1_stderr,none": 0.00023375532941714104,
						"mcc,none": -0.016334423518002312,
						"mcc_stderr,none": 0.02702956160115033
					},
					"lambada": {
						"acc,none": 0.7079371240054337,
						"acc_stderr,none": 0.016225371123469567,
						"alias": "lambada",
						"perplexity,none": 3.824733600965998,
						"perplexity_stderr,none": 0.21265282709924802
					},
					"lambada_multilingual": {
						"acc,none": 0.536541820298855,
						"acc_stderr,none": 0.08337101181286767,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.673213351045558,
						"perplexity_stderr,none": 8.033475296927634
					},
					"mmlu": {
						"acc,none": 0.27823671841618003,
						"acc_stderr,none": 0.04358180301387528,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.26950053134962804,
						"acc_stderr,none": 0.03475903523979383,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3070485999356293,
						"acc_stderr,none": 0.04739119950580957,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2739681507962301,
						"acc_stderr,none": 0.03677986501944946,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26704725658103395,
						"acc_stderr,none": 0.050621563181111266,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4719285714285714,
						"acc_stderr,none": 0.05840345547343853,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7251034386574691,
						"acc_norm,none": 0.6210467091864162,
						"acc_norm_stderr,none": 0.010571662389924822,
						"acc_stderr,none": 0.15094906760547522,
						"alias": "pythia",
						"bits_per_byte,none": 0.6359161607191486,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5539242318056556,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.430241698322102,
						"perplexity_stderr,none": 0.06653347425689152,
						"word_perplexity,none": 10.55994400861837,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3071806732958668,
						"acc_stderr,none": 0.0015057945036115713,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.016077509266133026,
						"bleu_diff,none": -8.301944705905596,
						"bleu_diff_stderr,none": 0.865844360712933,
						"bleu_max,none": 26.601520835600947,
						"bleu_max_stderr,none": 0.8079363184976165,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766367,
						"rouge1_diff,none": -11.179879002628741,
						"rouge1_diff_stderr,none": 0.9257074563258036,
						"rouge1_max,none": 51.127204619703704,
						"rouge1_max_stderr,none": 0.8873691794036822,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -13.281476205995212,
						"rouge2_diff_stderr,none": 1.1118955654636873,
						"rouge2_max,none": 34.816275436371896,
						"rouge2_max_stderr,none": 1.0354685964088897,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283352,
						"rougeL_diff,none": -11.457765152233321,
						"rougeL_diff_stderr,none": 0.9338256292489286,
						"rougeL_max,none": 48.402611862361944,
						"rougeL_max_stderr,none": 0.9085375937285607
					},
					"xcopa": {
						"acc,none": 0.6198181818181818,
						"acc_stderr,none": 0.07304688695875908,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4388219544846051,
						"acc_stderr,none": 0.05401692139799717,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6199386318512725,
						"acc_stderr,none": 0.06292593658179597,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8091706001348618,
						"acc_stderr,none": 0.0359989505797655,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6175310033821871,
						"acc_norm,none": 0.616685456595265,
						"acc_norm_stderr,none": 0.09289779725397056,
						"acc_stderr,none": 0.11042489578410247,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.014749272239807062,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.015231776226264909,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405749,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3641666666666667,
						"acc_stderr,none": 0.013896714966807267,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3839590443686007,
						"acc_norm,none": 0.4206484641638225,
						"acc_norm_stderr,none": 0.014426211252508408,
						"acc_stderr,none": 0.01421244498065189,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7327441077441077,
						"acc_norm,none": 0.7133838383838383,
						"acc_norm_stderr,none": 0.009278551100969298,
						"acc_stderr,none": 0.009080463246017469,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8248059701492537,
						"acc_stderr,none": 0.16041833086955387,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248118,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426665,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705577929,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731973,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.00977991035984717,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598123,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301315,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.01255952792670738,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343961,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036246,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813615,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033847,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734952,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487916,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792949,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.00408995448968909,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745887,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702306,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.01335493745228157,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.00727440148169705,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042956,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.00371723254825659,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.014205696104091503,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265153,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.01279561361278654,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405752,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.01286407728849933,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248132,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333345,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333437,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.01534116525402664,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118588,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.467,
						"acc_stderr,none": 0.015784807891138782,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.015658997547870254,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397344,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.015733516566347826,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724453,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662742,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660013,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639244,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783248,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731977,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.015813097547730987,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697047,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695796,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426513,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.449,
						"acc_stderr,none": 0.015736792768752027,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151101,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427421,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493939,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855736,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.01024421514533666,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511954,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.0074548356504067275,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106065,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099212,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.422,
						"acc_stderr,none": 0.015625625112620663,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.362,
						"acc_stderr,none": 0.015204840912919501,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2951994474184078,
						"acc_norm,none": 0.2951994474184078,
						"acc_norm_stderr,none": 0.051494108725818784,
						"acc_stderr,none": 0.051494108725818784,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.032839063537459336,
						"acc_stderr,none": 0.032839063537459336,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.31297709923664124,
						"acc_norm,none": 0.31297709923664124,
						"acc_norm_stderr,none": 0.04066962905677698,
						"acc_stderr,none": 0.04066962905677698,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.04026377210787311,
						"acc_stderr,none": 0.04026377210787311,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3281733746130031,
						"acc_norm,none": 0.3281733746130031,
						"acc_norm_stderr,none": 0.02616690401755082,
						"acc_stderr,none": 0.02616690401755082,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.031822318676475544,
						"acc_stderr,none": 0.031822318676475544,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.0284588209914603,
						"acc_stderr,none": 0.0284588209914603,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16981132075471697,
						"acc_norm,none": 0.16981132075471697,
						"acc_norm_stderr,none": 0.036641823111517896,
						"acc_stderr,none": 0.036641823111517896,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.41509433962264153,
						"acc_norm,none": 0.41509433962264153,
						"acc_norm_stderr,none": 0.04808633394970665,
						"acc_stderr,none": 0.04808633394970665,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.18518518518518517,
						"acc_norm,none": 0.18518518518518517,
						"acc_norm_stderr,none": 0.037552658650371835,
						"acc_stderr,none": 0.037552658650371835,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439377,
						"acc_stderr,none": 0.04396093377439377,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.0279848798118845,
						"acc_stderr,none": 0.0279848798118845,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.03270287181482081,
						"acc_stderr,none": 0.03270287181482081,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.037387423042158106,
						"acc_stderr,none": 0.037387423042158106,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.03908914479291562,
						"acc_stderr,none": 0.03908914479291562,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124563,
						"acc_stderr,none": 0.03673404171124563,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.294478527607362,
						"acc_norm,none": 0.294478527607362,
						"acc_norm_stderr,none": 0.03581165790474082,
						"acc_stderr,none": 0.03581165790474082,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255952,
						"acc_stderr,none": 0.03492619473255952,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.03154449888270286,
						"acc_stderr,none": 0.03154449888270286,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.38235294117647056,
						"acc_norm,none": 0.38235294117647056,
						"acc_norm_stderr,none": 0.031566630992154156,
						"acc_stderr,none": 0.031566630992154156,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930125,
						"acc_stderr,none": 0.028009647070930125,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659166,
						"acc_stderr,none": 0.03919986517659166,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3087248322147651,
						"acc_norm,none": 0.3087248322147651,
						"acc_norm_stderr,none": 0.03797348027213082,
						"acc_stderr,none": 0.03797348027213082,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.041100705493392085,
						"acc_stderr,none": 0.041100705493392085,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.038950910157241385,
						"acc_stderr,none": 0.038950910157241385,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.040735243221471276,
						"acc_stderr,none": 0.040735243221471276,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741765,
						"acc_stderr,none": 0.03351597731741765,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871636,
						"acc_stderr,none": 0.03615263198871636,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.29927007299270075,
						"acc_norm,none": 0.29927007299270075,
						"acc_norm_stderr,none": 0.022615961145736822,
						"acc_stderr,none": 0.022615961145736822,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.032589395336056405,
						"acc_stderr,none": 0.032589395336056405,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.04183273258787625,
						"acc_stderr,none": 0.04183273258787625,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03169833889962088,
						"acc_stderr,none": 0.03169833889962088,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.03878352372138622,
						"acc_stderr,none": 0.03878352372138622,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.033370375852212746,
						"acc_stderr,none": 0.033370375852212746,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.03064119407629314,
						"acc_stderr,none": 0.03064119407629314,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3232758620689655,
						"acc_norm,none": 0.3232758620689655,
						"acc_norm_stderr,none": 0.030774179531794447,
						"acc_stderr,none": 0.030774179531794447,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3563218390804598,
						"acc_norm,none": 0.3563218390804598,
						"acc_norm_stderr,none": 0.03641099577255491,
						"acc_stderr,none": 0.03641099577255491,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066653,
						"acc_stderr,none": 0.03785714465066653,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.031175070714705388,
						"acc_stderr,none": 0.031175070714705388,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336698,
						"acc_stderr,none": 0.03333068663336698,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586736,
						"acc_stderr,none": 0.03560846537586736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.03616379286462018,
						"acc_stderr,none": 0.03616379286462018,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070896,
						"acc_stderr,none": 0.03541088558070896,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.016334423518002312,
						"mcc_stderr,none": 0.02702956160115033
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5792341591233922,
						"acc_stderr,none": 0.009549615252674835,
						"alias": "glue",
						"f1,none": 0.6760970082860837,
						"f1_stderr,none": 0.00023375532941714104,
						"mcc,none": -0.016334423518002312,
						"mcc_stderr,none": 0.02702956160115033
					},
					"hellaswag": {
						"acc,none": 0.525094602668791,
						"acc_norm,none": 0.7107149970125473,
						"acc_norm_stderr,none": 0.004525037849178845,
						"acc_stderr,none": 0.004983492928102842,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7079371240054337,
						"acc_stderr,none": 0.016225371123469567,
						"alias": "lambada",
						"perplexity,none": 3.824733600965998,
						"perplexity_stderr,none": 0.21265282709924802
					},
					"lambada_multilingual": {
						"acc,none": 0.536541820298855,
						"acc_stderr,none": 0.08337101181286767,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.673213351045558,
						"perplexity_stderr,none": 8.033475296927634
					},
					"lambada_openai": {
						"acc,none": 0.7382107510188239,
						"acc_stderr,none": 0.006124606054308013,
						"alias": " - lambada_openai",
						"perplexity,none": 3.430241698322102,
						"perplexity_stderr,none": 0.06653347425689152
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42363671647583934,
						"acc_stderr,none": 0.00688425617620753,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 33.888055293028195,
						"perplexity_stderr,none": 1.8305176564841452
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7380166893071997,
						"acc_stderr,none": 0.0061260703148250314,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.429279049466838,
						"perplexity_stderr,none": 0.06646147511791792
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4572093925868426,
						"acc_stderr,none": 0.006940420862895472,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.326432422922924,
						"perplexity_stderr,none": 1.3722541198612677
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.549776829031632,
						"acc_stderr,none": 0.006931372038835374,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.176511755906525,
						"perplexity_stderr,none": 0.7651305821262503
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5140694740927615,
						"acc_stderr,none": 0.006963219279097559,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.54578823390331,
						"perplexity_stderr,none": 1.1164436255705752
					},
					"lambada_standard": {
						"acc,none": 0.6780516204152921,
						"acc_stderr,none": 0.006509334262746004,
						"alias": " - lambada_standard",
						"perplexity,none": 4.220086259710904,
						"perplexity_stderr,none": 0.08869508379122865
					},
					"logiqa": {
						"acc,none": 0.23348694316436253,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.017803862148538012,
						"acc_stderr,none": 0.016593362460570887,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.27823671841618003,
						"acc_stderr,none": 0.04358180301387528,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.15,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.03999262876617721,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3169811320754717,
						"acc_stderr,none": 0.028637235639800904,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610334,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436695,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.022182037202948368,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3258064516129032,
						"acc_stderr,none": 0.0266620105785671,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.031270907132977,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03681050869161551,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2828282828282828,
						"acc_stderr,none": 0.032087795587867514,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3471502590673575,
						"acc_stderr,none": 0.034356961683613546,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.022421273612923714,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.027420019350945273,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.25630252100840334,
						"acc_stderr,none": 0.02835962087053395,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.034791855725996586,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27155963302752295,
						"acc_stderr,none": 0.019069098363191442,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1712962962962963,
						"acc_stderr,none": 0.025695341643824695,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.29901960784313725,
						"acc_stderr,none": 0.03213325717373617,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29957805907172996,
						"acc_stderr,none": 0.029818024749753095,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.39461883408071746,
						"acc_stderr,none": 0.03280400504755291,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847836,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.26950053134962804,
						"acc_stderr,none": 0.03475903523979383,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.24793388429752067,
						"acc_stderr,none": 0.039418975265163025,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25766871165644173,
						"acc_stderr,none": 0.03436150827846917,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285712,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2524271844660194,
						"acc_stderr,none": 0.04301250399690878,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.34615384615384615,
						"acc_stderr,none": 0.031166957367235897,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.36015325670498083,
						"acc_stderr,none": 0.017166362471369295,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.02344582627654554,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2335195530726257,
						"acc_stderr,none": 0.014149575348976266,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.0248480182638752,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3070485999356293,
						"acc_stderr,none": 0.04739119950580957,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3247588424437299,
						"acc_stderr,none": 0.026596782287697043,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.025407197798890162,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.026244920349843007,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2542372881355932,
						"acc_stderr,none": 0.011121129007840678,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.21691176470588236,
						"acc_stderr,none": 0.02503584522771126,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.018152871051538802,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.33636363636363636,
						"acc_stderr,none": 0.04525393596302505,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866767,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2739681507962301,
						"acc_stderr,none": 0.03677986501944946,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2935323383084577,
						"acc_stderr,none": 0.03220024104534205,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26704725658103395,
						"acc_stderr,none": 0.050621563181111266,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.30120481927710846,
						"acc_stderr,none": 0.035716092300534796,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.03565079670708312,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.41579215486500254,
						"acc_stderr,none": 0.004975065592129463,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4169039869812856,
						"acc_stderr,none": 0.0049726651096968135,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8386075949367089,
						"f1_stderr,none": 0.015772750049210724
					},
					"openbookqa": {
						"acc,none": 0.298,
						"acc_norm,none": 0.42,
						"acc_norm_stderr,none": 0.02209471322976178,
						"acc_stderr,none": 0.02047511809298897,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.403,
						"acc_stderr,none": 0.010970673536247522,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3805,
						"acc_stderr,none": 0.010859043899773373,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3945,
						"acc_stderr,none": 0.01093135958200793,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.01113466952507867,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5215,
						"acc_stderr,none": 0.011172792428275121,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.011181324206260284,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4719285714285714,
						"acc_stderr,none": 0.05840345547343853,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7665941240478781,
						"acc_norm,none": 0.779651795429815,
						"acc_norm_stderr,none": 0.009670535456853148,
						"acc_stderr,none": 0.009869247889521007,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7251034386574691,
						"acc_norm,none": 0.6210467091864162,
						"acc_norm_stderr,none": 0.010571662389924822,
						"acc_stderr,none": 0.15094906760547522,
						"alias": "pythia",
						"bits_per_byte,none": 0.6359161607191486,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5539242318056556,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.430241698322102,
						"perplexity_stderr,none": 0.06653347425689152,
						"word_perplexity,none": 10.55994400861837,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49697968149368477,
						"acc_stderr,none": 0.0067652871181183415,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6603512243383626,
						"acc_stderr,none": 0.002355353087278235,
						"alias": " - qqp",
						"f1,none": 0.6746896617075713,
						"f1_stderr,none": 0.0026230868527408664
					},
					"record": {
						"alias": "record",
						"em,none": 0.2777,
						"em_stderr,none": 0.004478870061488788,
						"f1,none": 0.2880019050180912,
						"f1_stderr,none": 0.004488470308720059
					},
					"rte": {
						"acc,none": 0.6137184115523465,
						"acc_stderr,none": 0.02930772038527051,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.956,
						"acc_norm,none": 0.92,
						"acc_norm_stderr,none": 0.008583336977753656,
						"acc_stderr,none": 0.006488921798427419,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9208715596330275,
						"acc_stderr,none": 0.009146538264185718,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3071806732958668,
						"acc_stderr,none": 0.0015057945036115713,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.016077509266133026,
						"bleu_diff,none": -8.301944705905596,
						"bleu_diff_stderr,none": 0.865844360712933,
						"bleu_max,none": 26.601520835600947,
						"bleu_max_stderr,none": 0.8079363184976165,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766367,
						"rouge1_diff,none": -11.179879002628741,
						"rouge1_diff_stderr,none": 0.9257074563258036,
						"rouge1_max,none": 51.127204619703704,
						"rouge1_max_stderr,none": 0.8873691794036822,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -13.281476205995212,
						"rouge2_diff_stderr,none": 1.1118955654636873,
						"rouge2_max,none": 34.816275436371896,
						"rouge2_max_stderr,none": 1.0354685964088897,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283352,
						"rougeL_diff,none": -11.457765152233321,
						"rougeL_diff_stderr,none": 0.9338256292489286,
						"rougeL_max,none": 48.402611862361944,
						"rougeL_max_stderr,none": 0.9085375937285607
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.016077509266133026,
						"bleu_diff,none": -8.301944705905596,
						"bleu_diff_stderr,none": 0.865844360712933,
						"bleu_max,none": 26.601520835600947,
						"bleu_max_stderr,none": 0.8079363184976165,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766367,
						"rouge1_diff,none": -11.179879002628741,
						"rouge1_diff_stderr,none": 0.9257074563258036,
						"rouge1_max,none": 51.127204619703704,
						"rouge1_max_stderr,none": 0.8873691794036822,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -13.281476205995212,
						"rouge2_diff_stderr,none": 1.1118955654636873,
						"rouge2_max,none": 34.816275436371896,
						"rouge2_max_stderr,none": 1.0354685964088897,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.015274176219283352,
						"rougeL_diff,none": -11.457765152233321,
						"rougeL_diff_stderr,none": 0.9338256292489286,
						"rougeL_max,none": 48.402611862361944,
						"rougeL_max_stderr,none": 0.9085375937285607
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2350061199510404,
						"acc_stderr,none": 0.014843061507731606,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3793552266406932,
						"acc_stderr,none": 0.013614070016571911,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6359161607191486,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5539242318056556,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.55994400861837,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.01314888332092315,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5576923076923077,
						"acc_stderr,none": 0.04893740777700999,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6198181818181818,
						"acc_stderr,none": 0.07304688695875908,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.02185468495561126,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070904,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.02143471235607266,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628043,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4388219544846051,
						"acc_stderr,none": 0.05401692139799717,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.485140562248996,
						"acc_stderr,none": 0.01001764608425538,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955253,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906758,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5421686746987951,
						"acc_stderr,none": 0.009986366819196488,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5056224899598394,
						"acc_stderr,none": 0.01002143920377733,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5184738955823294,
						"acc_stderr,none": 0.010015229768357,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164687,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774335,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3815261044176707,
						"acc_stderr,none": 0.009736668133098167,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41606425702811245,
						"acc_stderr,none": 0.009879848511479756,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4469879518072289,
						"acc_stderr,none": 0.00996558406254617,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467432,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41244979919678715,
						"acc_stderr,none": 0.009867237678555588,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3566265060240964,
						"acc_stderr,none": 0.009601209437867974,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6199386318512725,
						"acc_stderr,none": 0.06292593658179597,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.01267071629096672,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7796161482461945,
						"acc_stderr,none": 0.010666988429058733,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6962276637988087,
						"acc_stderr,none": 0.011834809582513093,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.01279924669010975,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503926,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6446062210456651,
						"acc_stderr,none": 0.01231724793041838,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5221707478491066,
						"acc_stderr,none": 0.012854469625936086,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.671740569159497,
						"acc_stderr,none": 0.012084283945686673,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828164,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5876902713434812,
						"acc_stderr,none": 0.01266769412239704,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6379880873593646,
						"acc_stderr,none": 0.01236742376945643,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8091706001348618,
						"acc_stderr,none": 0.0359989505797655,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8688172043010752,
						"acc_stderr,none": 0.00700300740079348,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7445255474452555,
						"acc_stderr,none": 0.014090642175469431,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7490494296577946,
						"acc_stderr,none": 0.026785433946579913,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.02589988079483365,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7777777777777778,
						"acc_stderr,none": 0.018536917448559433,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-15t-extd-e2_pth"
	},
	"./rwkv-x-dev/R4-7B-15t-extd-e3_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6195039458850057,
						"acc_norm,none": 0.620913190529876,
						"acc_norm_stderr,none": 0.09407203868850132,
						"acc_stderr,none": 0.10895355274274814,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.369375,
						"acc_stderr,none": 0.014793311904842031,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8259850746268657,
						"acc_stderr,none": 0.15948860396143308,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.295631151787256,
						"acc_norm,none": 0.295631151787256,
						"acc_norm_stderr,none": 0.051680863835773194,
						"acc_stderr,none": 0.051680863835773194,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5925291805621724,
						"acc_stderr,none": 0.009060301865518466,
						"alias": "glue",
						"f1,none": 0.6827689293949226,
						"f1_stderr,none": 0.0002143315114879418,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030844542384637602
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.01604646189771103,
						"alias": "lambada",
						"perplexity,none": 3.7972290288812998,
						"perplexity_stderr,none": 0.1998648009036379
					},
					"lambada_multilingual": {
						"acc,none": 0.5348728895788861,
						"acc_stderr,none": 0.08348553667217809,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.69958369050738,
						"perplexity_stderr,none": 8.023397419836229
					},
					"mmlu": {
						"acc,none": 0.27973223187580115,
						"acc_stderr,none": 0.045302535810193015,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2729011689691817,
						"acc_stderr,none": 0.03793122868615571,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.31155455423237843,
						"acc_stderr,none": 0.050441879769084916,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27006824829379267,
						"acc_stderr,none": 0.03550617323062232,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2679987313669521,
						"acc_stderr,none": 0.05079236680702764,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4699285714285714,
						"acc_stderr,none": 0.05730816510857095,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7262239766812161,
						"acc_norm,none": 0.6251878562533685,
						"acc_norm_stderr,none": 0.010760686333745394,
						"acc_stderr,none": 0.15032970241695826,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357495918979674,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553744831184874,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4268400312535876,
						"perplexity_stderr,none": 0.06643461253955245,
						"word_perplexity,none": 10.553426321421641,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3093251122191318,
						"acc_stderr,none": 0.0014520312995378274,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.01625524199317918,
						"bleu_diff,none": -7.550735568627145,
						"bleu_diff_stderr,none": 0.8613891617867167,
						"bleu_max,none": 26.801090438529904,
						"bleu_max_stderr,none": 0.8105337183427349,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.01541524174023701,
						"rouge1_diff,none": -10.848837103279743,
						"rouge1_diff_stderr,none": 0.9253081755886877,
						"rouge1_max,none": 51.04636706470203,
						"rouge1_max_stderr,none": 0.8907085146595698,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080515,
						"rouge2_diff,none": -12.552927295558312,
						"rouge2_diff_stderr,none": 1.109374132912298,
						"rouge2_max,none": 34.83519804080084,
						"rouge2_max_stderr,none": 1.0404266067075798,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237017,
						"rougeL_diff,none": -11.049545377474665,
						"rougeL_diff_stderr,none": 0.9332533592411646,
						"rougeL_max,none": 48.31973150429046,
						"rougeL_max_stderr,none": 0.9109676966224217
					},
					"xcopa": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.07317338206238856,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4368942436412316,
						"acc_stderr,none": 0.05409832255671822,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6197581372961916,
						"acc_stderr,none": 0.06249796636019511,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8105192178017532,
						"acc_stderr,none": 0.03636438013702856,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6195039458850057,
						"acc_norm,none": 0.620913190529876,
						"acc_norm_stderr,none": 0.09407203868850132,
						"acc_stderr,none": 0.10895355274274814,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.369375,
						"acc_stderr,none": 0.014793311904842031,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.368,
						"acc_stderr,none": 0.015258073561521798,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.01527525231651936,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.013943170730086435,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3890784982935154,
						"acc_norm,none": 0.42235494880546076,
						"acc_norm_stderr,none": 0.014434138713379977,
						"acc_stderr,none": 0.014247309976045607,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7331649831649831,
						"acc_norm,none": 0.7188552188552189,
						"acc_norm_stderr,none": 0.009224735470286995,
						"acc_stderr,none": 0.009075915859267264,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8259850746268657,
						"acc_stderr,none": 0.15948860396143308,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248116,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705577929,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.01210216767618359,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024954,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702294,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.583,
						"acc_stderr,none": 0.015599819048769616,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598276,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998097,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452368,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426114,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452373,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487914,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280312,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910652,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881428,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750643,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.01316983084342567,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792935,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.00371723254825659,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.01420569610409151,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499344,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.637,
						"acc_stderr,none": 0.015213890444671281,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617333,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919288,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244052,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.015349091002225347,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.01579621855130262,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771302,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577792,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.01087884871433332,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.561,
						"acc_stderr,none": 0.015701131345400767,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.00957536880165389,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.01335493745228157,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474914,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248097,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183585,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.495,
						"acc_stderr,none": 0.015818508944436656,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.0073351758537068155,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248092,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888944,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.0157161699532041,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.0066278147173807105,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.0154996851658426,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.01024421514533666,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.01028132801274739,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662164,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357798,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929515,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323492,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.0039698563903194225,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.427,
						"acc_stderr,none": 0.01564978964446222,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.369,
						"acc_stderr,none": 0.015266698139154622,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.295631151787256,
						"acc_norm,none": 0.295631151787256,
						"acc_norm_stderr,none": 0.051680863835773194,
						"acc_stderr,none": 0.051680863835773194,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.034101678366769764,
						"acc_stderr,none": 0.034101678366769764,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620372,
						"acc_stderr,none": 0.03794062549620372,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.038746956666858304,
						"acc_stderr,none": 0.038746956666858304,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.035886248000917075,
						"acc_stderr,none": 0.035886248000917075,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745934,
						"acc_stderr,none": 0.03283906353745934,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.33587786259541985,
						"acc_norm,none": 0.33587786259541985,
						"acc_norm_stderr,none": 0.04142313771996663,
						"acc_stderr,none": 0.04142313771996663,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288875,
						"acc_stderr,none": 0.03976333292288875,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3281733746130031,
						"acc_norm,none": 0.3281733746130031,
						"acc_norm_stderr,none": 0.026166904017550823,
						"acc_stderr,none": 0.026166904017550823,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.032282103870378914,
						"acc_stderr,none": 0.032282103870378914,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.033831950813285244,
						"acc_stderr,none": 0.033831950813285244,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2616033755274262,
						"acc_norm,none": 0.2616033755274262,
						"acc_norm_stderr,none": 0.028609516716994934,
						"acc_stderr,none": 0.028609516716994934,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16981132075471697,
						"acc_norm,none": 0.16981132075471697,
						"acc_norm_stderr,none": 0.036641823111517896,
						"acc_stderr,none": 0.036641823111517896,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.04773249298367361,
						"acc_stderr,none": 0.04773249298367361,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.22807017543859648,
						"acc_norm,none": 0.22807017543859648,
						"acc_norm_stderr,none": 0.032180937956023566,
						"acc_stderr,none": 0.032180937956023566,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675178,
						"acc_stderr,none": 0.03711513959675178,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.03908914479291562,
						"acc_stderr,none": 0.03908914479291562,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871007,
						"acc_stderr,none": 0.03713396279871007,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2883435582822086,
						"acc_norm,none": 0.2883435582822086,
						"acc_norm_stderr,none": 0.035590395316173425,
						"acc_stderr,none": 0.035590395316173425,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255952,
						"acc_stderr,none": 0.03492619473255952,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.028514456573421422,
						"acc_stderr,none": 0.028514456573421422,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.03163145807552379,
						"acc_stderr,none": 0.03163145807552379,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930118,
						"acc_stderr,none": 0.028009647070930118,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066655,
						"acc_stderr,none": 0.03785714465066655,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.040735243221471276,
						"acc_stderr,none": 0.040735243221471276,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.32558139534883723,
						"acc_norm,none": 0.32558139534883723,
						"acc_norm_stderr,none": 0.03583410038767278,
						"acc_stderr,none": 0.03583410038767278,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.291970802919708,
						"acc_norm,none": 0.291970802919708,
						"acc_norm_stderr,none": 0.022454498879013785,
						"acc_stderr,none": 0.022454498879013785,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3691588785046729,
						"acc_norm,none": 0.3691588785046729,
						"acc_norm_stderr,none": 0.03306563404172724,
						"acc_stderr,none": 0.03306563404172724,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3252032520325203,
						"acc_norm,none": 0.3252032520325203,
						"acc_norm_stderr,none": 0.04241153733573298,
						"acc_stderr,none": 0.04241153733573298,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.28095238095238095,
						"acc_norm,none": 0.28095238095238095,
						"acc_norm_stderr,none": 0.03109009446934461,
						"acc_stderr,none": 0.03109009446934461,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.03394921616447879,
						"acc_stderr,none": 0.03394921616447879,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.0428025479250546,
						"acc_stderr,none": 0.0428025479250546,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.33793103448275863,
						"acc_norm,none": 0.33793103448275863,
						"acc_norm_stderr,none": 0.0394170763206489,
						"acc_stderr,none": 0.0394170763206489,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.033133343292217204,
						"acc_stderr,none": 0.033133343292217204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.03064119407629314,
						"acc_stderr,none": 0.03064119407629314,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26063829787234044,
						"acc_norm,none": 0.26063829787234044,
						"acc_norm_stderr,none": 0.022668978836259783,
						"acc_stderr,none": 0.022668978836259783,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33620689655172414,
						"acc_norm,none": 0.33620689655172414,
						"acc_norm_stderr,none": 0.031082338581586128,
						"acc_stderr,none": 0.031082338581586128,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3505747126436782,
						"acc_norm,none": 0.3505747126436782,
						"acc_norm_stderr,none": 0.03627703962615276,
						"acc_stderr,none": 0.03627703962615276,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800254,
						"acc_stderr,none": 0.03885004245800254,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3053097345132743,
						"acc_norm,none": 0.3053097345132743,
						"acc_norm_stderr,none": 0.030702565982138927,
						"acc_stderr,none": 0.030702565982138927,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586736,
						"acc_stderr,none": 0.03560846537586736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.0365545115043377,
						"acc_stderr,none": 0.0365545115043377,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030844542384637602
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5925291805621724,
						"acc_stderr,none": 0.009060301865518466,
						"alias": "glue",
						"f1,none": 0.6827689293949226,
						"f1_stderr,none": 0.0002143315114879418,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030844542384637602
					},
					"hellaswag": {
						"acc,none": 0.5256920932085242,
						"acc_norm,none": 0.7108145787691695,
						"acc_norm_stderr,none": 0.004524575892952941,
						"acc_stderr,none": 0.004983189711208511,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.01604646189771103,
						"alias": "lambada",
						"perplexity,none": 3.7972290288812998,
						"perplexity_stderr,none": 0.1998648009036379
					},
					"lambada_multilingual": {
						"acc,none": 0.5348728895788861,
						"acc_stderr,none": 0.08348553667217809,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.69958369050738,
						"perplexity_stderr,none": 8.023397419836229
					},
					"lambada_openai": {
						"acc,none": 0.7378226275955754,
						"acc_stderr,none": 0.006127533032498063,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4268400312535876,
						"perplexity_stderr,none": 0.06643461253955245
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4238307781874636,
						"acc_stderr,none": 0.006884673454916905,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 33.76427287030846,
						"perplexity_stderr,none": 1.8250161320893652
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7372404424607025,
						"acc_stderr,none": 0.006131911939484161,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4291770378884343,
						"perplexity_stderr,none": 0.06645955631756637
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45371628177760526,
						"acc_stderr,none": 0.00693606865693546,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.522176183964998,
						"perplexity_stderr,none": 1.3817704056077003
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.548418397050262,
						"acc_stderr,none": 0.006933239470474421,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.18538219960447,
						"perplexity_stderr,none": 0.7652857952171268
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.511158548418397,
						"acc_stderr,none": 0.0069642427287659806,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.59691016077054,
						"perplexity_stderr,none": 1.1186977092227166
					},
					"lambada_standard": {
						"acc,none": 0.678633805550165,
						"acc_stderr,none": 0.006506237504010488,
						"alias": " - lambada_standard",
						"perplexity,none": 4.165796149249942,
						"perplexity_stderr,none": 0.0869205842305695
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399504,
						"acc_stderr,none": 0.016555252497925898,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.27973223187580115,
						"acc_stderr,none": 0.045302535810193015,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.03775251680686371,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.32452830188679244,
						"acc_stderr,none": 0.028815615713432115,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23699421965317918,
						"acc_stderr,none": 0.03242414757483098,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3276595744680851,
						"acc_stderr,none": 0.030683020843231004,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436695,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2566137566137566,
						"acc_stderr,none": 0.022494510767503154,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0404061017820884,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3225806451612903,
						"acc_stderr,none": 0.026593084516572288,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114475,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3696969696969697,
						"acc_stderr,none": 0.037694303145125674,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.03173071239071724,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3316062176165803,
						"acc_stderr,none": 0.03397636541089116,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.258974358974359,
						"acc_stderr,none": 0.02221110681006166,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.027420019350945273,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.25630252100840334,
						"acc_stderr,none": 0.02835962087053395,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.03511807571804726,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26605504587155965,
						"acc_stderr,none": 0.018946022322225604,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258533,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3137254901960784,
						"acc_stderr,none": 0.03256685484460388,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29957805907172996,
						"acc_stderr,none": 0.029818024749753095,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.40358744394618834,
						"acc_stderr,none": 0.032928028193303156,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847836,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2729011689691817,
						"acc_stderr,none": 0.03793122868615571,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.256198347107438,
						"acc_stderr,none": 0.03984979653302872,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.044531975073749834,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.034624199316156234,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2621359223300971,
						"acc_stderr,none": 0.043546310772605956,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3418803418803419,
						"acc_stderr,none": 0.03107502852650775,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.37292464878671777,
						"acc_stderr,none": 0.017292868269453924,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.023267528432100174,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24581005586592178,
						"acc_stderr,none": 0.014400296429225598,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24836601307189543,
						"acc_stderr,none": 0.02473998135511359,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.31155455423237843,
						"acc_stderr,none": 0.050441879769084916,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.33440514469453375,
						"acc_stderr,none": 0.026795422327893944,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.30246913580246915,
						"acc_stderr,none": 0.025557653981868055,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24771838331160365,
						"acc_stderr,none": 0.011025499291443738,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.21323529411764705,
						"acc_stderr,none": 0.024880971512294264,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.018120224251484577,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.33636363636363636,
						"acc_stderr,none": 0.04525393596302505,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20816326530612245,
						"acc_stderr,none": 0.025991117672813296,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27006824829379267,
						"acc_stderr,none": 0.03550617323062232,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.29850746268656714,
						"acc_stderr,none": 0.03235743789355043,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2679987313669521,
						"acc_stderr,none": 0.05079236680702764,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3132530120481928,
						"acc_stderr,none": 0.036108050180310235,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03615507630310936,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.43739174732552216,
						"acc_stderr,none": 0.005007434931696606,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4372457282343369,
						"acc_stderr,none": 0.005002917482419273,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8380952380952381,
						"f1_stderr,none": 0.015820897511560118
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.02051442622562804,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4005,
						"acc_stderr,none": 0.010959467594960344,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.010872654105766945,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.010928939603659161,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.01113466952507867,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.011181324206260284,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4699285714285714,
						"acc_stderr,none": 0.05730816510857095,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7682263329706203,
						"acc_norm,none": 0.780739934711643,
						"acc_norm_stderr,none": 0.009653357463605315,
						"acc_stderr,none": 0.009845143772794036,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7262239766812161,
						"acc_norm,none": 0.6251878562533685,
						"acc_norm_stderr,none": 0.010760686333745394,
						"acc_stderr,none": 0.15032970241695826,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357495918979674,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553744831184874,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4268400312535876,
						"perplexity_stderr,none": 0.06643461253955245,
						"word_perplexity,none": 10.553426321421641,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49697968149368477,
						"acc_stderr,none": 0.006765287118118341,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6721493940143458,
						"acc_stderr,none": 0.002334664079478855,
						"alias": " - qqp",
						"f1,none": 0.6814237988799962,
						"f1_stderr,none": 0.0026230916250295426
					},
					"record": {
						"alias": "record",
						"em,none": 0.2759,
						"em_stderr,none": 0.004469890042462,
						"f1,none": 0.2861885716855526,
						"f1_stderr,none": 0.004480147605339446
					},
					"rte": {
						"acc,none": 0.6101083032490975,
						"acc_stderr,none": 0.02935762508384806,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.957,
						"acc_norm,none": 0.923,
						"acc_norm_stderr,none": 0.008434580140240679,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9220183486238532,
						"acc_stderr,none": 0.009085669152846603,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3093251122191318,
						"acc_stderr,none": 0.0014520312995378274,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.01625524199317918,
						"bleu_diff,none": -7.550735568627145,
						"bleu_diff_stderr,none": 0.8613891617867167,
						"bleu_max,none": 26.801090438529904,
						"bleu_max_stderr,none": 0.8105337183427349,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.01541524174023701,
						"rouge1_diff,none": -10.848837103279743,
						"rouge1_diff_stderr,none": 0.9253081755886877,
						"rouge1_max,none": 51.04636706470203,
						"rouge1_max_stderr,none": 0.8907085146595698,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080515,
						"rouge2_diff,none": -12.552927295558312,
						"rouge2_diff_stderr,none": 1.109374132912298,
						"rouge2_max,none": 34.83519804080084,
						"rouge2_max_stderr,none": 1.0404266067075798,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237017,
						"rougeL_diff,none": -11.049545377474665,
						"rougeL_diff_stderr,none": 0.9332533592411646,
						"rougeL_max,none": 48.31973150429046,
						"rougeL_max_stderr,none": 0.9109676966224217
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.01625524199317918,
						"bleu_diff,none": -7.550735568627145,
						"bleu_diff_stderr,none": 0.8613891617867167,
						"bleu_max,none": 26.801090438529904,
						"bleu_max_stderr,none": 0.8105337183427349,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.01541524174023701,
						"rouge1_diff,none": -10.848837103279743,
						"rouge1_diff_stderr,none": 0.9253081755886877,
						"rouge1_max,none": 51.04636706470203,
						"rouge1_max_stderr,none": 0.8907085146595698,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080515,
						"rouge2_diff,none": -12.552927295558312,
						"rouge2_diff_stderr,none": 1.109374132912298,
						"rouge2_max,none": 34.83519804080084,
						"rouge2_max_stderr,none": 1.0404266067075798,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237017,
						"rougeL_diff,none": -11.049545377474665,
						"rougeL_diff_stderr,none": 0.9332533592411646,
						"rougeL_max,none": 48.31973150429046,
						"rougeL_max_stderr,none": 0.9109676966224217
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2386780905752754,
						"acc_stderr,none": 0.014922629695456416,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.37997213386298817,
						"acc_stderr,none": 0.013586689324125783,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6357495918979674,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553744831184874,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.553426321421641,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6771902131018153,
						"acc_stderr,none": 0.013140498173357947,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5352112676056338,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5480769230769231,
						"acc_stderr,none": 0.049038186969314335,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.07317338206238856,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.021834685869369208,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.019684688820194713,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561316,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.02143471235607266,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896142,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896135,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4368942436412316,
						"acc_stderr,none": 0.05409832255671822,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778586,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.48032128514056227,
						"acc_stderr,none": 0.010014307727112709,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.493574297188755,
						"acc_stderr,none": 0.010021245217159394,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3927710843373494,
						"acc_stderr,none": 0.009788891787583067,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5441767068273092,
						"acc_stderr,none": 0.009982878443738399,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5004016064257029,
						"acc_stderr,none": 0.01002206963435386,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5144578313253012,
						"acc_stderr,none": 0.01001788218560602,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099363,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.47670682730923697,
						"acc_stderr,none": 0.0100111915700213,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.009726763372837142,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.00988121593211599,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4461847389558233,
						"acc_stderr,none": 0.009963854274139159,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.009807915070677296,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40401606425702813,
						"acc_stderr,none": 0.00983567444538583,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3566265060240964,
						"acc_stderr,none": 0.009601209437867976,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6197581372961916,
						"acc_stderr,none": 0.06249796636019511,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7769688947716744,
						"acc_stderr,none": 0.010712628906979181,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6955658504301787,
						"acc_stderr,none": 0.011842062145503249,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.012806088966122401,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570946,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6452680344142951,
						"acc_stderr,none": 0.012312089524603838,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5215089344804765,
						"acc_stderr,none": 0.012855214257296611,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.671740569159497,
						"acc_stderr,none": 0.012084283945686673,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.01267071629096672,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139025,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8105192178017532,
						"acc_stderr,none": 0.03636438013702856,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8709677419354839,
						"acc_stderr,none": 0.006953958940141571,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7382690302398331,
						"acc_stderr,none": 0.0142020856634007,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7490494296577946,
						"acc_stderr,none": 0.026785433946579916,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.02582169136035425,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187162,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-15t-extd-e3_pth"
	},
	"./rwkv-x-dev/R4-7B-Base-32k-No-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6254227733934611,
						"acc_norm_stderr,none": 0.0906022472075351,
						"acc_stderr,none": 0.10613908473542506,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.017155083114387657,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.838776119402985,
						"acc_stderr,none": 0.143039356067901,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.30012087722327746,
						"acc_norm,none": 0.30012087722327746,
						"acc_norm_stderr,none": 0.05392283640486302,
						"acc_stderr,none": 0.05392283640486302,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5739489042401144,
						"acc_stderr,none": 0.013353323846468576,
						"alias": "glue",
						"f1,none": 0.6854371479110042,
						"f1_stderr,none": 0.0002108194612770564,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013130625586951826
					},
					"lambada": {
						"acc,none": 0.7137589753541627,
						"acc_stderr,none": 0.01453009339507032,
						"alias": "lambada",
						"perplexity,none": 3.846826313125052,
						"perplexity_stderr,none": 0.2231406531831286
					},
					"lambada_multilingual": {
						"acc,none": 0.5321948379584707,
						"acc_stderr,none": 0.08553746830229633,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.586998037277183,
						"perplexity_stderr,none": 8.44185343935239
					},
					"mmlu": {
						"acc,none": 0.3349950149551346,
						"acc_stderr,none": 0.062221516294539314,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3275239107332625,
						"acc_stderr,none": 0.062372365253175376,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3730286449951722,
						"acc_stderr,none": 0.051216234291621164,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35976600584985374,
						"acc_stderr,none": 0.0531632696310427,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2844909609895338,
						"acc_stderr,none": 0.059232012978177234,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.45207142857142857,
						"acc_stderr,none": 0.053085560471253235,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7438353656923419,
						"acc_norm,none": 0.6293876042946833,
						"acc_norm_stderr,none": 0.010169978671853298,
						"acc_stderr,none": 0.13893458420677826,
						"alias": "pythia",
						"bits_per_byte,none": 0.6329570488421921,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550740244183999,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4288265320314064,
						"perplexity_stderr,none": 0.06731099002281415,
						"word_perplexity,none": 10.444753759354695,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3309072269382368,
						"acc_stderr,none": 0.0016765743781892778,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446335,
						"bleu_diff,none": -4.369185523968988,
						"bleu_diff_stderr,none": 0.9284895221805273,
						"bleu_max,none": 28.716266881587895,
						"bleu_max_stderr,none": 0.820444289892688,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -5.56010861966572,
						"rouge1_diff_stderr,none": 1.067267545016389,
						"rouge1_max,none": 54.12407918192328,
						"rouge1_max_stderr,none": 0.8672421342889551,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -7.235733073318749,
						"rouge2_diff_stderr,none": 1.259042617370475,
						"rouge2_max,none": 38.4329564656009,
						"rouge2_max_stderr,none": 1.051575772842756,
						"rougeL_acc,none": 0.33047735618115054,
						"rougeL_acc_stderr,none": 0.016466769613698303,
						"rougeL_diff,none": -5.610813209337879,
						"rougeL_diff_stderr,none": 1.0847061770112485,
						"rougeL_max,none": 51.53233532631266,
						"rougeL_max_stderr,none": 0.8919939083382645
					},
					"xcopa": {
						"acc,none": 0.6229090909090909,
						"acc_stderr,none": 0.07050144021381959,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44061579651941096,
						"acc_stderr,none": 0.04924014074907689,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6330545695204861,
						"acc_stderr,none": 0.06025268717757791,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.03618061730130849,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6254227733934611,
						"acc_norm_stderr,none": 0.0906022472075351,
						"acc_stderr,none": 0.10613908473542506,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.017155083114387657,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779857,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408945,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35583333333333333,
						"acc_stderr,none": 0.013826518748493308,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40017064846416384,
						"acc_norm,none": 0.43430034129692835,
						"acc_norm_stderr,none": 0.01448470304885736,
						"acc_stderr,none": 0.014317197787809193,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7352693602693603,
						"acc_norm,none": 0.7196969696969697,
						"acc_norm_stderr,none": 0.009216306864088038,
						"acc_stderr,none": 0.009053021086173967,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.838776119402985,
						"acc_stderr,none": 0.143039356067901,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942323,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000143,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264602,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595285,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307989,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425682,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357788,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403623,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389636,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.00597215762238962,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706839,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315158,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178344,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333336,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.01272607374459826,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763914,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.01246159264665999,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240648,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181325,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319422,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.411,
						"acc_stderr,none": 0.015566673418599273,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.01101091459599244,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370143,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436965,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665539,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832007,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704168,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557428,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524282,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343965,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307978,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.014526080235459543,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286423,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357807,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653869,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524282,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319329,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323502,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319324,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01578686875935901,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081363,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491108,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029945,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717616,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.01579621855130262,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160326,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.0061998740663370714,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405749,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816336,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665546,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697596,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240648,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.00556839357508137,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.015537226438634604,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.015222868840522017,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.30012087722327746,
						"acc_norm,none": 0.30012087722327746,
						"acc_norm_stderr,none": 0.05392283640486302,
						"acc_stderr,none": 0.05392283640486302,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395385,
						"acc_stderr,none": 0.03242041613395385,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.0331592569829487,
						"acc_stderr,none": 0.0331592569829487,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.040263772107873096,
						"acc_stderr,none": 0.040263772107873096,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3281733746130031,
						"acc_norm,none": 0.3281733746130031,
						"acc_norm_stderr,none": 0.026166904017550823,
						"acc_stderr,none": 0.026166904017550823,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.032962451101722294,
						"acc_stderr,none": 0.032962451101722294,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.31843575418994413,
						"acc_norm,none": 0.31843575418994413,
						"acc_norm_stderr,none": 0.03491839802265681,
						"acc_stderr,none": 0.03491839802265681,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.029443773022594703,
						"acc_stderr,none": 0.029443773022594703,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852894,
						"acc_stderr,none": 0.04730439022852894,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2564102564102564,
						"acc_norm,none": 0.2564102564102564,
						"acc_norm_stderr,none": 0.026475851706699704,
						"acc_stderr,none": 0.026475851706699704,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.03296245110172227,
						"acc_stderr,none": 0.03296245110172227,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.035087719298245626,
						"acc_stderr,none": 0.035087719298245626,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2805755395683453,
						"acc_norm,none": 0.2805755395683453,
						"acc_norm_stderr,none": 0.03824529014900686,
						"acc_stderr,none": 0.03824529014900686,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.037502930030867444,
						"acc_stderr,none": 0.037502930030867444,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935573,
						"acc_stderr,none": 0.03714908409935573,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.028746730632681364,
						"acc_stderr,none": 0.028746730632681364,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.2828282828282828,
						"acc_norm_stderr,none": 0.03208779558786751,
						"acc_stderr,none": 0.03208779558786751,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.39915966386554624,
						"acc_norm,none": 0.39915966386554624,
						"acc_norm_stderr,none": 0.03181110032413926,
						"acc_stderr,none": 0.03181110032413926,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930118,
						"acc_stderr,none": 0.028009647070930118,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3288590604026846,
						"acc_norm,none": 0.3288590604026846,
						"acc_norm_stderr,none": 0.03861721178313763,
						"acc_stderr,none": 0.03861721178313763,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233627,
						"acc_stderr,none": 0.04222776832233627,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.036446693486947866,
						"acc_stderr,none": 0.036446693486947866,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26763990267639903,
						"acc_norm,none": 0.26763990267639903,
						"acc_norm_stderr,none": 0.021864816663672664,
						"acc_stderr,none": 0.021864816663672664,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.03331120297324245,
						"acc_stderr,none": 0.03331120297324245,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837383,
						"acc_stderr,none": 0.04239540943837383,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3380952380952381,
						"acc_norm,none": 0.3380952380952381,
						"acc_norm_stderr,none": 0.0327223237140444,
						"acc_stderr,none": 0.0327223237140444,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.328042328042328,
						"acc_norm,none": 0.328042328042328,
						"acc_norm_stderr,none": 0.0342418307585366,
						"acc_stderr,none": 0.0342418307585366,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3523809523809524,
						"acc_norm,none": 0.3523809523809524,
						"acc_norm_stderr,none": 0.046843501394377526,
						"acc_stderr,none": 0.046843501394377526,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843724,
						"acc_stderr,none": 0.034645078898843724,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24202127659574468,
						"acc_norm,none": 0.24202127659574468,
						"acc_norm_stderr,none": 0.022117683921586976,
						"acc_stderr,none": 0.022117683921586976,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.35344827586206895,
						"acc_norm,none": 0.35344827586206895,
						"acc_norm_stderr,none": 0.031452746950022696,
						"acc_stderr,none": 0.031452746950022696,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.03117507071470539,
						"acc_stderr,none": 0.03117507071470539,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676528,
						"acc_stderr,none": 0.03597530251676528,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278585,
						"acc_stderr,none": 0.03637652289278585,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013130625586951826
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5739489042401144,
						"acc_stderr,none": 0.013353323846468576,
						"alias": "glue",
						"f1,none": 0.6854371479110042,
						"f1_stderr,none": 0.0002108194612770564,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013130625586951826
					},
					"hellaswag": {
						"acc,none": 0.5279824736108345,
						"acc_norm,none": 0.708922525393348,
						"acc_norm_stderr,none": 0.004533307758521329,
						"acc_stderr,none": 0.004981961097590808,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7137589753541627,
						"acc_stderr,none": 0.01453009339507032,
						"alias": "lambada",
						"perplexity,none": 3.846826313125052,
						"perplexity_stderr,none": 0.2231406531831286
					},
					"lambada_multilingual": {
						"acc,none": 0.5321948379584707,
						"acc_stderr,none": 0.08553746830229633,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.586998037277183,
						"perplexity_stderr,none": 8.44185343935239
					},
					"lambada_openai": {
						"acc,none": 0.7413157384048127,
						"acc_stderr,none": 0.0061009671491424455,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4288265320314064,
						"perplexity_stderr,none": 0.06731099002281415
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4218901610712206,
						"acc_stderr,none": 0.006880451721323677,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.180067307082794,
						"perplexity_stderr,none": 1.9589756221679824
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7391810595769455,
						"acc_stderr,none": 0.006117261570238606,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.430497510515627,
						"perplexity_stderr,none": 0.06733772832352031
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4405200853871531,
						"acc_stderr,none": 0.006916512722816757,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.182698041638368,
						"perplexity_stderr,none": 1.4931143791025014
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5453134096642732,
						"acc_stderr,none": 0.006937312121911725,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.875622904805315,
						"perplexity_stderr,none": 0.8205838192926675
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5140694740927615,
						"acc_stderr,none": 0.006963219279097559,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.266104422343826,
						"perplexity_stderr,none": 1.182212384610668
					},
					"lambada_standard": {
						"acc,none": 0.6875606442848826,
						"acc_stderr,none": 0.006457292279746489,
						"alias": " - lambada_standard",
						"perplexity,none": 4.2635880870384995,
						"perplexity_stderr,none": 0.0905481183049313
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.01674276693510144,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3349950149551346,
						"acc_stderr,none": 0.062221516294539314,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.37777777777777777,
						"acc_stderr,none": 0.04188307537595853,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.036906779861372814,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3886792452830189,
						"acc_stderr,none": 0.030000485448675986,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.038990736873573344,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2832369942196532,
						"acc_stderr,none": 0.034355680560478746,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364396,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3574468085106383,
						"acc_stderr,none": 0.03132941789476425,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.03835153954399421,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.037800192304380135,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.022418042891113946,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3870967741935484,
						"acc_stderr,none": 0.027709359675032488,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.30049261083743845,
						"acc_stderr,none": 0.03225799476233483,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.44242424242424244,
						"acc_stderr,none": 0.038783721137112745,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.0347327959083696,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.44559585492227977,
						"acc_stderr,none": 0.03587014986075658,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.32051282051282054,
						"acc_stderr,none": 0.02366129639396428,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25555555555555554,
						"acc_stderr,none": 0.026593939101844075,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.030283995525884396,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3871559633027523,
						"acc_stderr,none": 0.02088423199264345,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16203703703703703,
						"acc_stderr,none": 0.025130453652268455,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.03492406104163613,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.45569620253164556,
						"acc_stderr,none": 0.03241920684693334,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.0426073515764456,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3275239107332625,
						"acc_stderr,none": 0.062372365253175376,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.043913262867240704,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497752,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3374233128834356,
						"acc_stderr,none": 0.03714908409935575,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.04327040932578729,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4563106796116505,
						"acc_stderr,none": 0.04931801994220416,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.405982905982906,
						"acc_stderr,none": 0.03217180182641086,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.44189016602809705,
						"acc_stderr,none": 0.017758800534214407,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3092485549132948,
						"acc_stderr,none": 0.024883140570071755,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.34967320261437906,
						"acc_stderr,none": 0.027305308076274695,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3730286449951722,
						"acc_stderr,none": 0.051216234291621164,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.40836012861736337,
						"acc_stderr,none": 0.027917050748484627,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.026822801759507894,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880592,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.29986962190352023,
						"acc_stderr,none": 0.011702660860193986,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.02909720956841195,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3202614379084967,
						"acc_stderr,none": 0.018875682938069443,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3306122448979592,
						"acc_stderr,none": 0.030116426296540596,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35976600584985374,
						"acc_stderr,none": 0.0531632696310427,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4527363184079602,
						"acc_stderr,none": 0.03519702717576915,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2844909609895338,
						"acc_stderr,none": 0.059232012978177234,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.0368078369072758,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.38596491228070173,
						"acc_stderr,none": 0.03733756969066164,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.382475802343352,
						"acc_stderr,none": 0.004905756019203567,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37042310821806346,
						"acc_stderr,none": 0.004870511547029289,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7450980392156863,
						"acc_stderr,none": 0.02160210604737706,
						"alias": " - mrpc",
						"f1,none": 0.8395061728395061,
						"f1_stderr,none": 0.015518703016443315
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.013296398891966758,
						"exact_match_stderr,remove_whitespace": 0.0019066322439963406
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020629569998345393,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.395,
						"acc_stderr,none": 0.010933767586555723,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.381,
						"acc_stderr,none": 0.010861790333302172,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3705,
						"acc_stderr,none": 0.010801537464907352,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.01114361207351664,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.517,
						"acc_stderr,none": 0.011176670299310671,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4765,
						"acc_stderr,none": 0.011170777418517836,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4825,
						"acc_stderr,none": 0.01117628425125418,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.45207142857142857,
						"acc_stderr,none": 0.053085560471253235,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7742110990206746,
						"acc_norm_stderr,none": 0.009754980670917313,
						"acc_stderr,none": 0.009812682950815195,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7438353656923419,
						"acc_norm,none": 0.6293876042946833,
						"acc_norm_stderr,none": 0.010169978671853298,
						"acc_stderr,none": 0.13893458420677826,
						"alias": "pythia",
						"bits_per_byte,none": 0.6329570488421921,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550740244183999,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4288265320314064,
						"perplexity_stderr,none": 0.06731099002281415,
						"word_perplexity,none": 10.444753759354695,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4973457807065715,
						"acc_stderr,none": 0.006765315228093263,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6713826366559486,
						"acc_stderr,none": 0.0023360589955274593,
						"alias": " - qqp",
						"f1,none": 0.6841029055114366,
						"f1_stderr,none": 0.0025999694911024796
					},
					"record": {
						"alias": "record",
						"em,none": 0.2534,
						"em_stderr,none": 0.0043497972451150245,
						"f1,none": 0.26248714308440685,
						"f1_stderr,none": 0.004364116110744737
					},
					"rte": {
						"acc,none": 0.5956678700361011,
						"acc_stderr,none": 0.029540420517619723,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.957,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.00807249435832349,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9105504587155964,
						"acc_stderr,none": 0.009670122820901149,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3309072269382368,
						"acc_stderr,none": 0.0016765743781892778,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446335,
						"bleu_diff,none": -4.369185523968988,
						"bleu_diff_stderr,none": 0.9284895221805273,
						"bleu_max,none": 28.716266881587895,
						"bleu_max_stderr,none": 0.820444289892688,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -5.56010861966572,
						"rouge1_diff_stderr,none": 1.067267545016389,
						"rouge1_max,none": 54.12407918192328,
						"rouge1_max_stderr,none": 0.8672421342889551,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -7.235733073318749,
						"rouge2_diff_stderr,none": 1.259042617370475,
						"rouge2_max,none": 38.4329564656009,
						"rouge2_max_stderr,none": 1.051575772842756,
						"rougeL_acc,none": 0.33047735618115054,
						"rougeL_acc_stderr,none": 0.016466769613698303,
						"rougeL_diff,none": -5.610813209337879,
						"rougeL_diff_stderr,none": 1.0847061770112485,
						"rougeL_max,none": 51.53233532631266,
						"rougeL_max_stderr,none": 0.8919939083382645
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446335,
						"bleu_diff,none": -4.369185523968988,
						"bleu_diff_stderr,none": 0.9284895221805273,
						"bleu_max,none": 28.716266881587895,
						"bleu_max_stderr,none": 0.820444289892688,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -5.56010861966572,
						"rouge1_diff_stderr,none": 1.067267545016389,
						"rouge1_max,none": 54.12407918192328,
						"rouge1_max_stderr,none": 0.8672421342889551,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -7.235733073318749,
						"rouge2_diff_stderr,none": 1.259042617370475,
						"rouge2_max,none": 38.4329564656009,
						"rouge2_max_stderr,none": 1.051575772842756,
						"rougeL_acc,none": 0.33047735618115054,
						"rougeL_acc_stderr,none": 0.016466769613698303,
						"rougeL_diff,none": -5.610813209337879,
						"rougeL_diff_stderr,none": 1.0847061770112485,
						"rougeL_max,none": 51.53233532631266,
						"rougeL_max_stderr,none": 0.8919939083382645
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156494,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40722449059618,
						"acc_stderr,none": 0.014384231587644099,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6329570488421921,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.550740244183999,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.444753759354695,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.67008681925809,
						"acc_stderr,none": 0.013214432542517536,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.049230010729780505,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6229090909090909,
						"acc_stderr,none": 0.07050144021381959,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.02001121929807353,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.01996610354027946,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843424,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.02210903931061855,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936603,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.0203950954849366,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44061579651941096,
						"acc_stderr,none": 0.04924014074907689,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177138,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.01000387141951773,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4863453815261044,
						"acc_stderr,none": 0.010018334967148554,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39196787148594375,
						"acc_stderr,none": 0.009785342947722884,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5401606425702812,
						"acc_stderr,none": 0.009989691810169682,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.010021956483068079,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483485,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4353413654618474,
						"acc_stderr,none": 0.009937920221480502,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010003,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42128514056224897,
						"acc_stderr,none": 0.009897099560589198,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169671,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099368,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.009852581919032238,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721316,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6330545695204861,
						"acc_stderr,none": 0.06025268717757791,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551167,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7101257445400397,
						"acc_stderr,none": 0.011675728247959368,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5625413633355394,
						"acc_stderr,none": 0.01276607097454961,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703514,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.01215116443816391,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.012806088966122398,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.011884972073313783,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.557246856386499,
						"acc_stderr,none": 0.012782510750319241,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927905,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6499007279947054,
						"acc_stderr,none": 0.012275258369751088,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.03618061730130849,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8606451612903225,
						"acc_stderr,none": 0.007183813190863164,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7476538060479666,
						"acc_stderr,none": 0.01403349677309752,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.752851711026616,
						"acc_stderr,none": 0.026649120420793503,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.026786851659200927,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7757936507936508,
						"acc_stderr,none": 0.018595723133309875,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-Base-32k-No-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-Base-No-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.637260428410372,
						"acc_norm_stderr,none": 0.09173956060720524,
						"acc_stderr,none": 0.10621964860183843,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3621875,
						"acc_stderr,none": 0.01625262676258344,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8394925373134329,
						"acc_stderr,none": 0.143649726448134,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29597651528233465,
						"acc_norm,none": 0.29597651528233465,
						"acc_norm_stderr,none": 0.05188567340491968,
						"acc_stderr,none": 0.05188567340491968,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5710904001905669,
						"acc_stderr,none": 0.01303908896092451,
						"alias": "glue",
						"f1,none": 0.6825295435122881,
						"f1_stderr,none": 0.00021116281686981163,
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"lambada": {
						"acc,none": 0.7149233456239084,
						"acc_stderr,none": 0.015141664332583113,
						"alias": "lambada",
						"perplexity,none": 3.845696583335677,
						"perplexity_stderr,none": 0.22137801648405828
					},
					"lambada_multilingual": {
						"acc,none": 0.5374345041723269,
						"acc_stderr,none": 0.08509350438266727,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.26609806509869,
						"perplexity_stderr,none": 8.280585314931779
					},
					"mmlu": {
						"acc,none": 0.33983762996724115,
						"acc_stderr,none": 0.06245763464528406,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3258235919234856,
						"acc_stderr,none": 0.05971400904085346,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.38879948503379463,
						"acc_stderr,none": 0.04859852988677865,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36691582710432247,
						"acc_stderr,none": 0.054823529933068396,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2860767522993974,
						"acc_stderr,none": 0.05833887923901244,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.45185714285714285,
						"acc_stderr,none": 0.060514736778599726,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7456429850469034,
						"acc_norm,none": 0.6408643962054856,
						"acc_norm_stderr,none": 0.010281796923917551,
						"acc_stderr,none": 0.13903249884852856,
						"alias": "pythia",
						"bits_per_byte,none": 0.6362235863958252,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5542553947313686,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.433928353577893,
						"perplexity_stderr,none": 0.0669897582709675,
						"word_perplexity,none": 10.571983869263562,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3269624619455129,
						"acc_stderr,none": 0.0015721480656785457,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3463892288861689,
						"bleu_acc_stderr,none": 0.016656997109125146,
						"bleu_diff,none": -5.403134069641461,
						"bleu_diff_stderr,none": 0.9163162347737959,
						"bleu_max,none": 28.312150099067384,
						"bleu_max_stderr,none": 0.8150660306316564,
						"rouge1_acc,none": 0.32068543451652387,
						"rouge1_acc_stderr,none": 0.016339170373280903,
						"rouge1_diff,none": -7.200918869944141,
						"rouge1_diff_stderr,none": 1.032989471866645,
						"rouge1_max,none": 53.612040412688636,
						"rouge1_max_stderr,none": 0.8697561873600266,
						"rouge2_acc,none": 0.28151774785801714,
						"rouge2_acc_stderr,none": 0.01574402724825605,
						"rouge2_diff,none": -8.871092762784391,
						"rouge2_diff_stderr,none": 1.2256704797855649,
						"rouge2_max,none": 37.745424935035636,
						"rouge2_max_stderr,none": 1.040539950716644,
						"rougeL_acc,none": 0.3182374541003672,
						"rougeL_acc_stderr,none": 0.016305988648920616,
						"rougeL_diff,none": -7.44121245050656,
						"rougeL_diff_stderr,none": 1.0503037757533595,
						"rougeL_max,none": 50.767644769923635,
						"rougeL_max_stderr,none": 0.8908346168272427
					},
					"xcopa": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.07313992086574198,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4417938420348059,
						"acc_stderr,none": 0.048696196631956214,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6329342398170988,
						"acc_stderr,none": 0.0608734890936588,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.03585374352455595,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.637260428410372,
						"acc_norm_stderr,none": 0.09173956060720524,
						"acc_stderr,none": 0.10621964860183843,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3621875,
						"acc_stderr,none": 0.01625262676258344,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779857,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451772,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3566666666666667,
						"acc_stderr,none": 0.013833742805050717,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4112627986348123,
						"acc_norm,none": 0.44368600682593856,
						"acc_norm_stderr,none": 0.014518421825670431,
						"acc_stderr,none": 0.014379441068522073,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7466329966329966,
						"acc_norm,none": 0.7327441077441077,
						"acc_norm_stderr,none": 0.00908046324601747,
						"acc_stderr,none": 0.008924765424529262,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8394925373134329,
						"acc_stderr,none": 0.143649726448134,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651512,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000143,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731972,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400252,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877366,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.015512467135715075,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346927,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767684,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695458,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165577,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280301,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832014,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.00431945108291062,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177546,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319324,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696251,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370145,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697598,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559549,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565916,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936724,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113118,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.0146326386586329,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890129,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557428,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785141,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.015417317979911074,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499363,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.609,
						"acc_stderr,none": 0.015438826294681778,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722694,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706833,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341673,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695796,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175297,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333356,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.01579951342999601,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165553,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400246,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.01401329270272948,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.497,
						"acc_stderr,none": 0.015819015179246724,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160328,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118776,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.657,
						"acc_stderr,none": 0.015019206922356951,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397243,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890113,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.01180043432464461,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832011,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074798,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813675,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.015740004693383852,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.396,
						"acc_stderr,none": 0.01547331326585941,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29597651528233465,
						"acc_norm,none": 0.29597651528233465,
						"acc_norm_stderr,none": 0.05188567340491968,
						"acc_stderr,none": 0.05188567340491968,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.038518021388670956,
						"acc_stderr,none": 0.038518021388670956,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.032606982441813086,
						"acc_stderr,none": 0.032606982441813086,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3282442748091603,
						"acc_norm,none": 0.3282442748091603,
						"acc_norm_stderr,none": 0.04118438565806298,
						"acc_stderr,none": 0.04118438565806298,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03921568627450978,
						"acc_stderr,none": 0.03921568627450978,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.32507739938080493,
						"acc_norm,none": 0.32507739938080493,
						"acc_norm_stderr,none": 0.026103121097542564,
						"acc_stderr,none": 0.026103121097542564,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2905027932960894,
						"acc_norm,none": 0.2905027932960894,
						"acc_norm_stderr,none": 0.03402831936794816,
						"acc_stderr,none": 0.03402831936794816,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460295,
						"acc_stderr,none": 0.028458820991460295,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004224,
						"acc_stderr,none": 0.04742907046004224,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2454212454212454,
						"acc_norm,none": 0.2454212454212454,
						"acc_norm_stderr,none": 0.02609299388422865,
						"acc_stderr,none": 0.02609299388422865,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115071,
						"acc_stderr,none": 0.03198001660115071,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.035087719298245626,
						"acc_stderr,none": 0.035087719298245626,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.037115139596751764,
						"acc_stderr,none": 0.037115139596751764,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124563,
						"acc_stderr,none": 0.03673404171124563,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250621,
						"acc_stderr,none": 0.03451628876250621,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683867,
						"acc_stderr,none": 0.028145741115683867,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.2828282828282828,
						"acc_norm_stderr,none": 0.032087795587867514,
						"acc_stderr,none": 0.032087795587867514,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40336134453781514,
						"acc_norm,none": 0.40336134453781514,
						"acc_norm_stderr,none": 0.031866081214088314,
						"acc_stderr,none": 0.031866081214088314,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.0374425492857706,
						"acc_stderr,none": 0.0374425492857706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233627,
						"acc_stderr,none": 0.04222776832233627,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3253968253968254,
						"acc_norm,none": 0.3253968253968254,
						"acc_norm_stderr,none": 0.041905964388711366,
						"acc_stderr,none": 0.041905964388711366,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.32558139534883723,
						"acc_norm,none": 0.32558139534883723,
						"acc_norm_stderr,none": 0.03583410038767278,
						"acc_stderr,none": 0.03583410038767278,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.021927461972871154,
						"acc_stderr,none": 0.021927461972871154,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3878504672897196,
						"acc_norm,none": 0.3878504672897196,
						"acc_norm_stderr,none": 0.03338651735918191,
						"acc_stderr,none": 0.03338651735918191,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.040849837332392225,
						"acc_stderr,none": 0.040849837332392225,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.36065573770491804,
						"acc_norm,none": 0.36065573770491804,
						"acc_norm_stderr,none": 0.04365370645566858,
						"acc_stderr,none": 0.04365370645566858,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.03283321069643155,
						"acc_stderr,none": 0.03283321069643155,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.035084853738606925,
						"acc_stderr,none": 0.035084853738606925,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.328042328042328,
						"acc_norm,none": 0.328042328042328,
						"acc_norm_stderr,none": 0.0342418307585366,
						"acc_stderr,none": 0.0342418307585366,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.038783523721386215,
						"acc_stderr,none": 0.038783523721386215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977447,
						"acc_stderr,none": 0.04654465622977447,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.03313334329221721,
						"acc_stderr,none": 0.03313334329221721,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26063829787234044,
						"acc_norm,none": 0.26063829787234044,
						"acc_norm_stderr,none": 0.022668978836259783,
						"acc_stderr,none": 0.022668978836259783,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.34913793103448276,
						"acc_norm,none": 0.34913793103448276,
						"acc_norm_stderr,none": 0.03136440175753368,
						"acc_stderr,none": 0.03136440175753368,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3160919540229885,
						"acc_norm,none": 0.3160919540229885,
						"acc_norm_stderr,none": 0.035349438976908586,
						"acc_stderr,none": 0.035349438976908586,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.038850042458002554,
						"acc_stderr,none": 0.038850042458002554,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.03117507071470539,
						"acc_stderr,none": 0.03117507071470539,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593337,
						"acc_stderr,none": 0.03253020905593337,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.033799766898963086,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5710904001905669,
						"acc_stderr,none": 0.01303908896092451,
						"alias": "glue",
						"f1,none": 0.6825295435122881,
						"f1_stderr,none": 0.00021116281686981163,
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"hellaswag": {
						"acc,none": 0.5252937661820355,
						"acc_norm,none": 0.7058354909380602,
						"acc_norm_stderr,none": 0.004547350179286252,
						"acc_stderr,none": 0.004983392650570965,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7149233456239084,
						"acc_stderr,none": 0.015141664332583113,
						"alias": "lambada",
						"perplexity,none": 3.845696583335677,
						"perplexity_stderr,none": 0.22137801648405828
					},
					"lambada_multilingual": {
						"acc,none": 0.5374345041723269,
						"acc_stderr,none": 0.08509350438266727,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.26609806509869,
						"perplexity_stderr,none": 8.280585314931779
					},
					"lambada_openai": {
						"acc,none": 0.7422860469629342,
						"acc_stderr,none": 0.006093498206249783,
						"alias": " - lambada_openai",
						"perplexity,none": 3.433928353577893,
						"perplexity_stderr,none": 0.0669897582709675
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4259654570153309,
						"acc_stderr,none": 0.006889191823711754,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.21491869676033,
						"perplexity_stderr,none": 1.878186670037439
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7418979235396856,
						"acc_stderr,none": 0.006096490478492319,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.432373306810292,
						"perplexity_stderr,none": 0.06694579039916938
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4449835047545119,
						"acc_stderr,none": 0.006923679791679078,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.433100544023983,
						"perplexity_stderr,none": 1.4904061137844005
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5530758781292451,
						"acc_stderr,none": 0.0069266194706025744,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.53818913678103,
						"perplexity_stderr,none": 0.7911878829346497
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5212497574228605,
						"acc_stderr,none": 0.006959683808965927,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.711908641117795,
						"perplexity_stderr,none": 1.1331402310711007
					},
					"lambada_standard": {
						"acc,none": 0.6873665825732583,
						"acc_stderr,none": 0.006458385716767269,
						"alias": " - lambada_standard",
						"perplexity,none": 4.259074703077097,
						"perplexity_stderr,none": 0.0899777774061101
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.2872503840245776,
						"acc_norm_stderr,none": 0.017747701948846593,
						"acc_stderr,none": 0.016742766935101436,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.33983762996724115,
						"acc_stderr,none": 0.06245763464528406,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.04094376269996793,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3092105263157895,
						"acc_stderr,none": 0.037610708698674805,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4188679245283019,
						"acc_stderr,none": 0.0303650508291152,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.03583901754736411,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.043364327079931785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.35319148936170214,
						"acc_stderr,none": 0.031245325202761926,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.04142439719489361,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3310344827586207,
						"acc_stderr,none": 0.039215453124671215,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2804232804232804,
						"acc_stderr,none": 0.02313528797432562,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.04134913018303316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3774193548387097,
						"acc_stderr,none": 0.02757596072327823,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.031447125816782426,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.42424242424242425,
						"acc_stderr,none": 0.038592681420702615,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.40404040404040403,
						"acc_stderr,none": 0.03496130972056127,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.48704663212435234,
						"acc_stderr,none": 0.036072280610477486,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.32564102564102565,
						"acc_stderr,none": 0.02375966576741229,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.026067159222275794,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29831932773109243,
						"acc_stderr,none": 0.02971914287634286,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3871559633027523,
						"acc_stderr,none": 0.02088423199264345,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258526,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.47058823529411764,
						"acc_stderr,none": 0.03503235296367992,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.42616033755274263,
						"acc_stderr,none": 0.03219035703131774,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.35874439461883406,
						"acc_stderr,none": 0.032190792004199956,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.0426073515764456,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3258235919234856,
						"acc_stderr,none": 0.05971400904085346,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3140495867768595,
						"acc_stderr,none": 0.042369647530410184,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.045879047413018105,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.31901840490797545,
						"acc_stderr,none": 0.03661997551073836,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.04157751539865629,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4563106796116505,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.41452991452991456,
						"acc_stderr,none": 0.03227396567623778,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.454661558109834,
						"acc_stderr,none": 0.017806304585052602,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3092485549132948,
						"acc_stderr,none": 0.024883140570071755,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103986,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.02718449890994161,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.38879948503379463,
						"acc_stderr,none": 0.04859852988677865,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.40192926045016075,
						"acc_stderr,none": 0.027846476005930487,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3734567901234568,
						"acc_stderr,none": 0.026915003011380147,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.02657786094330786,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.303129074315515,
						"acc_stderr,none": 0.011738669951254301,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.39338235294117646,
						"acc_stderr,none": 0.029674288281311183,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.019070985589687492,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.0467375233367024,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.32653061224489793,
						"acc_stderr,none": 0.030021056238440313,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36691582710432247,
						"acc_stderr,none": 0.054823529933068396,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.47761194029850745,
						"acc_stderr,none": 0.035319879302087305,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2860767522993974,
						"acc_stderr,none": 0.05833887923901244,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683229,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4093567251461988,
						"acc_stderr,none": 0.037712831076265434,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3811512990320937,
						"acc_stderr,none": 0.004902503553505843,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36910089503661514,
						"acc_stderr,none": 0.004866913827094503,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7426470588235294,
						"acc_stderr,none": 0.02166998427065975,
						"alias": " - mrpc",
						"f1,none": 0.8367029548989113,
						"f1_stderr,none": 0.01572793754679602
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.023822714681440444,
						"exact_match_stderr,remove_whitespace": 0.0025384378369153653
					},
					"openbookqa": {
						"acc,none": 0.316,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.020812359515855854,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.010878012942757035,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3585,
						"acc_stderr,none": 0.010725968403790009,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.372,
						"acc_stderr,none": 0.010810477936548653,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.011160921022883274,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.011166819105029986,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4965,
						"acc_stderr,none": 0.011182862030875934,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.45185714285714285,
						"acc_stderr,none": 0.060514736778599726,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7725788900979326,
						"acc_norm,none": 0.7774755168661589,
						"acc_norm_stderr,none": 0.009704600975718236,
						"acc_stderr,none": 0.009779850767847252,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7456429850469034,
						"acc_norm,none": 0.6408643962054856,
						"acc_norm_stderr,none": 0.010281796923917551,
						"acc_stderr,none": 0.13903249884852856,
						"alias": "pythia",
						"bits_per_byte,none": 0.6362235863958252,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5542553947313686,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.433928353577893,
						"perplexity_stderr,none": 0.0669897582709675,
						"word_perplexity,none": 10.571983869263562,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5010067728354384,
						"acc_stderr,none": 0.006765396837036612,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6667326242888943,
						"acc_stderr,none": 0.0023443678360143866,
						"alias": " - qqp",
						"f1,none": 0.6811943971228469,
						"f1_stderr,none": 0.0026025810889297473
					},
					"record": {
						"alias": "record",
						"em,none": 0.2714,
						"em_stderr,none": 0.004447041895256011,
						"f1,none": 0.28084857166707516,
						"f1_stderr,none": 0.004457544188951761
					},
					"rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.02966006629089349,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.0065588122414061145,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9174311926605505,
						"acc_stderr,none": 0.009325791021628806,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3269624619455129,
						"acc_stderr,none": 0.0015721480656785457,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3463892288861689,
						"bleu_acc_stderr,none": 0.016656997109125146,
						"bleu_diff,none": -5.403134069641461,
						"bleu_diff_stderr,none": 0.9163162347737959,
						"bleu_max,none": 28.312150099067384,
						"bleu_max_stderr,none": 0.8150660306316564,
						"rouge1_acc,none": 0.32068543451652387,
						"rouge1_acc_stderr,none": 0.016339170373280903,
						"rouge1_diff,none": -7.200918869944141,
						"rouge1_diff_stderr,none": 1.032989471866645,
						"rouge1_max,none": 53.612040412688636,
						"rouge1_max_stderr,none": 0.8697561873600266,
						"rouge2_acc,none": 0.28151774785801714,
						"rouge2_acc_stderr,none": 0.01574402724825605,
						"rouge2_diff,none": -8.871092762784391,
						"rouge2_diff_stderr,none": 1.2256704797855649,
						"rouge2_max,none": 37.745424935035636,
						"rouge2_max_stderr,none": 1.040539950716644,
						"rougeL_acc,none": 0.3182374541003672,
						"rougeL_acc_stderr,none": 0.016305988648920616,
						"rougeL_diff,none": -7.44121245050656,
						"rougeL_diff_stderr,none": 1.0503037757533595,
						"rougeL_max,none": 50.767644769923635,
						"rougeL_max_stderr,none": 0.8908346168272427
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3463892288861689,
						"bleu_acc_stderr,none": 0.016656997109125146,
						"bleu_diff,none": -5.403134069641461,
						"bleu_diff_stderr,none": 0.9163162347737959,
						"bleu_max,none": 28.312150099067384,
						"bleu_max_stderr,none": 0.8150660306316564,
						"rouge1_acc,none": 0.32068543451652387,
						"rouge1_acc_stderr,none": 0.016339170373280903,
						"rouge1_diff,none": -7.200918869944141,
						"rouge1_diff_stderr,none": 1.032989471866645,
						"rouge1_max,none": 53.612040412688636,
						"rouge1_max_stderr,none": 0.8697561873600266,
						"rouge2_acc,none": 0.28151774785801714,
						"rouge2_acc_stderr,none": 0.01574402724825605,
						"rouge2_diff,none": -8.871092762784391,
						"rouge2_diff_stderr,none": 1.2256704797855649,
						"rouge2_max,none": 37.745424935035636,
						"rouge2_max_stderr,none": 1.040539950716644,
						"rougeL_acc,none": 0.3182374541003672,
						"rougeL_acc_stderr,none": 0.016305988648920616,
						"rougeL_diff,none": -7.44121245050656,
						"rougeL_diff_stderr,none": 1.0503037757533595,
						"rougeL_max,none": 50.767644769923635,
						"rougeL_max_stderr,none": 0.8908346168272427
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2533659730722154,
						"acc_stderr,none": 0.015225899340826845,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4005589508188104,
						"acc_stderr,none": 0.014242692386578538,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6362235863958252,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5542553947313686,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.571983869263562,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.01314888332092315,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04926646390821466,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.07313992086574198,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01992048320956607,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587574,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.021081766571222856,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745183,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.02014357284729078,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4417938420348059,
						"acc_stderr,none": 0.048696196631956214,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4755020080321285,
						"acc_stderr,none": 0.010010036112667858,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.485140562248996,
						"acc_stderr,none": 0.01001764608425538,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3967871485943775,
						"acc_stderr,none": 0.009806220246670027,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5357429718875502,
						"acc_stderr,none": 0.009996432468510353,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4931726907630522,
						"acc_stderr,none": 0.010021138522919167,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4959839357429719,
						"acc_stderr,none": 0.010021749574555894,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4285140562248996,
						"acc_stderr,none": 0.009919113605650922,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40080321285140563,
						"acc_stderr,none": 0.009822858473047374,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.43293172690763054,
						"acc_stderr,none": 0.009931501976863056,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46546184738955826,
						"acc_stderr,none": 0.009998133936261184,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41807228915662653,
						"acc_stderr,none": 0.009886618180256034,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.00984346200738422,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998446,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6329342398170988,
						"acc_stderr,none": 0.0608734890936588,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247465,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7829252150893448,
						"acc_stderr,none": 0.010609046579012718,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7101257445400397,
						"acc_stderr,none": 0.011675728247959366,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5645268034414295,
						"acc_stderr,none": 0.012759525506489233,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6055592322964924,
						"acc_stderr,none": 0.012577106513936129,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.012168840221678032,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5413633355393779,
						"acc_stderr,none": 0.012823020340169817,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.685638649900728,
						"acc_stderr,none": 0.011947409363762422,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716336,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5929847782925215,
						"acc_stderr,none": 0.01264266483681693,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6538716082064858,
						"acc_stderr,none": 0.012242676637496357,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.03585374352455595,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8593548387096774,
						"acc_stderr,none": 0.007211582882146504,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.049425892997830935,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7466110531803962,
						"acc_stderr,none": 0.01405265182922641,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7566539923954373,
						"acc_stderr,none": 0.02651002461891976,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.026726874754294024,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.018417468024139707,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-Base-No-Mask_pth"
	},
	"./rwkv-x-dev/R4-7B-Base-With-Mask_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6338782412626832,
						"acc_norm,none": 0.633314543404735,
						"acc_norm_stderr,none": 0.09029956855235052,
						"acc_stderr,none": 0.10569725474280342,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3603125,
						"acc_stderr,none": 0.0166278796782515,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8390597014925373,
						"acc_stderr,none": 0.14320506541850453,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2957174926610257,
						"acc_norm,none": 0.2957174926610257,
						"acc_norm_stderr,none": 0.053299934340383194,
						"acc_stderr,none": 0.053299934340383194,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5675172701286327,
						"acc_stderr,none": 0.013061563851142016,
						"alias": "glue",
						"f1,none": 0.6805422864078293,
						"f1_stderr,none": 0.00020959594592049902,
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.01474489884440239,
						"alias": "lambada",
						"perplexity,none": 3.8476110332770506,
						"perplexity_stderr,none": 0.22063603683924984
					},
					"lambada_multilingual": {
						"acc,none": 0.5373180671453522,
						"acc_stderr,none": 0.08523266203444416,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.31615753055653,
						"perplexity_stderr,none": 8.298765482730351
					},
					"mmlu": {
						"acc,none": 0.3439680957128614,
						"acc_stderr,none": 0.06340013035322513,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3321997874601488,
						"acc_stderr,none": 0.06276756560204373,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.39137431606050854,
						"acc_stderr,none": 0.04697062313782774,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36886577835554113,
						"acc_stderr,none": 0.05619273710950392,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2905169679670156,
						"acc_stderr,none": 0.05954930920623147,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4507857142857143,
						"acc_stderr,none": 0.06106676645703439,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7458966934943291,
						"acc_norm,none": 0.6370611422748406,
						"acc_norm_stderr,none": 0.010081877708510817,
						"acc_stderr,none": 0.1390221629887055,
						"alias": "pythia",
						"bits_per_byte,none": 0.6363476745780474,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543890841221912,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4342308757822018,
						"perplexity_stderr,none": 0.06693174674753623,
						"word_perplexity,none": 10.576847482578392,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.32941774242352867,
						"acc_stderr,none": 0.0016185082452014232,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.35128518971848227,
						"bleu_acc_stderr,none": 0.0167113581635444,
						"bleu_diff,none": -5.073626658121167,
						"bleu_diff_stderr,none": 0.9188952783988822,
						"bleu_max,none": 28.371862716234155,
						"bleu_max_stderr,none": 0.8138253180980356,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -6.590060734936701,
						"rouge1_diff_stderr,none": 1.043618044751592,
						"rouge1_max,none": 53.787449919973184,
						"rouge1_max_stderr,none": 0.870627841172565,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -8.197533631539986,
						"rouge2_diff_stderr,none": 1.235838968512957,
						"rouge2_max,none": 38.016601381205575,
						"rouge2_max_stderr,none": 1.045156582546898,
						"rougeL_acc,none": 0.3243574051407589,
						"rougeL_acc_stderr,none": 0.01638797677964794,
						"rougeL_diff,none": -6.812687503821623,
						"rougeL_diff_stderr,none": 1.0599226404594715,
						"rougeL_max,none": 50.97546516149105,
						"rougeL_max_stderr,none": 0.8953741526473659
					},
					"xcopa": {
						"acc,none": 0.6261818181818182,
						"acc_stderr,none": 0.0727927369933245,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4416331994645248,
						"acc_stderr,none": 0.049526967860546896,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6323927561518561,
						"acc_stderr,none": 0.060764631830965705,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8042256686895932,
						"acc_stderr,none": 0.035867365106512665,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6338782412626832,
						"acc_norm,none": 0.633314543404735,
						"acc_norm_stderr,none": 0.09029956855235052,
						"acc_stderr,none": 0.10569725474280342,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3603125,
						"acc_stderr,none": 0.0166278796782515,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779855,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316405,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3541666666666667,
						"acc_stderr,none": 0.013811933499570956,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4104095563139932,
						"acc_norm,none": 0.44283276450511944,
						"acc_norm_stderr,none": 0.014515573873348916,
						"acc_stderr,none": 0.014374922192642659,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7441077441077442,
						"acc_norm,none": 0.7272727272727273,
						"acc_norm_stderr,none": 0.00913863072636423,
						"acc_stderr,none": 0.008953950243013993,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8390597014925373,
						"acc_stderr,none": 0.14320506541850453,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426596,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000143,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597512,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151138,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425668,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.015473313265859406,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346927,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165556,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280302,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697593,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.00420638724961148,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244059,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968133,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.01323250161908534,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264366,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098694,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.374,
						"acc_stderr,none": 0.015308767369006368,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343968,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099186,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206488,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496497,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333377,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323506,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286429,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524282,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308495,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.01053479862085576,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.015817274929209008,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307982,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509001,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.0074548356504067275,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516034,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462122,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785129,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655128,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033855,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118574,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597509,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.015805341148131296,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389625,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.013912208651021359,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.497,
						"acc_stderr,none": 0.015819015179246724,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163044,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706624,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724444,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264611,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890112,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177546,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.00882342636694231,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698454,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452372,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.015704987954361798,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.396,
						"acc_stderr,none": 0.015473313265859412,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2957174926610257,
						"acc_norm,none": 0.2957174926610257,
						"acc_norm_stderr,none": 0.053299934340383194,
						"acc_stderr,none": 0.053299934340383194,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.32057416267942584,
						"acc_norm,none": 0.32057416267942584,
						"acc_norm_stderr,none": 0.03235963541722357,
						"acc_stderr,none": 0.03235963541722357,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245472,
						"acc_stderr,none": 0.03244189290245472,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.32061068702290074,
						"acc_norm,none": 0.32061068702290074,
						"acc_norm_stderr,none": 0.040933292298342784,
						"acc_stderr,none": 0.040933292298342784,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3014705882352941,
						"acc_norm,none": 0.3014705882352941,
						"acc_norm_stderr,none": 0.039495529298273935,
						"acc_stderr,none": 0.039495529298273935,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.02603803874433866,
						"acc_stderr,none": 0.02603803874433866,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2905027932960894,
						"acc_norm,none": 0.2905027932960894,
						"acc_norm_stderr,none": 0.03402831936794816,
						"acc_stderr,none": 0.03402831936794816,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.028304657943035296,
						"acc_stderr,none": 0.028304657943035296,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02582505450222104,
						"acc_stderr,none": 0.02582505450222104,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.03228210387037893,
						"acc_stderr,none": 0.03228210387037893,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735706,
						"acc_stderr,none": 0.03881956126735706,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124563,
						"acc_stderr,none": 0.03673404171124563,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.036803503712864616,
						"acc_stderr,none": 0.036803503712864616,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683867,
						"acc_stderr,none": 0.028145741115683867,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29292929292929293,
						"acc_norm,none": 0.29292929292929293,
						"acc_norm_stderr,none": 0.032424979581788166,
						"acc_stderr,none": 0.032424979581788166,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40336134453781514,
						"acc_norm,none": 0.40336134453781514,
						"acc_norm_stderr,none": 0.031866081214088314,
						"acc_stderr,none": 0.031866081214088314,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933942,
						"acc_stderr,none": 0.028187385293933942,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3087248322147651,
						"acc_norm,none": 0.3087248322147651,
						"acc_norm_stderr,none": 0.03797348027213082,
						"acc_stderr,none": 0.03797348027213082,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049809,
						"acc_stderr,none": 0.04187011593049809,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3412698412698413,
						"acc_norm,none": 0.3412698412698413,
						"acc_norm_stderr,none": 0.04240799327574923,
						"acc_stderr,none": 0.04240799327574923,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.03369553691877718,
						"acc_stderr,none": 0.03369553691877718,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871634,
						"acc_stderr,none": 0.03615263198871634,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.021927461972871154,
						"acc_stderr,none": 0.021927461972871154,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.397196261682243,
						"acc_norm,none": 0.397196261682243,
						"acc_norm_stderr,none": 0.033527466939507825,
						"acc_stderr,none": 0.033527466939507825,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.36065573770491804,
						"acc_norm,none": 0.36065573770491804,
						"acc_norm_stderr,none": 0.04365370645566858,
						"acc_stderr,none": 0.04365370645566858,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630123,
						"acc_stderr,none": 0.03260773253630123,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3386243386243386,
						"acc_norm,none": 0.3386243386243386,
						"acc_norm_stderr,none": 0.03451471285997054,
						"acc_stderr,none": 0.03451471285997054,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26063829787234044,
						"acc_norm,none": 0.26063829787234044,
						"acc_norm_stderr,none": 0.022668978836259783,
						"acc_stderr,none": 0.022668978836259783,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0312732353098133,
						"acc_stderr,none": 0.0312732353098133,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.038850042458002554,
						"acc_stderr,none": 0.038850042458002554,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.03106182084032612,
						"acc_stderr,none": 0.03106182084032612,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.03663974994391242,
						"acc_stderr,none": 0.03663974994391242,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.033799766898963086,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5675172701286327,
						"acc_stderr,none": 0.013061563851142016,
						"alias": "glue",
						"f1,none": 0.6805422864078293,
						"f1_stderr,none": 0.00020959594592049902,
						"mcc,none": -0.02339059069282274,
						"mcc_stderr,none": 0.02537286898986937
					},
					"hellaswag": {
						"acc,none": 0.5247958573989245,
						"acc_norm,none": 0.7056363274248157,
						"acc_norm_stderr,none": 0.004548247487546319,
						"acc_stderr,none": 0.004983641854351153,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7152144381913449,
						"acc_stderr,none": 0.01474489884440239,
						"alias": "lambada",
						"perplexity,none": 3.8476110332770506,
						"perplexity_stderr,none": 0.22063603683924984
					},
					"lambada_multilingual": {
						"acc,none": 0.5373180671453522,
						"acc_stderr,none": 0.08523266203444416,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.31615753055653,
						"perplexity_stderr,none": 8.298765482730351
					},
					"lambada_openai": {
						"acc,none": 0.7430622938094315,
						"acc_stderr,none": 0.006087494839873366,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4342308757822018,
						"perplexity_stderr,none": 0.06693174674753623
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4261595187269552,
						"acc_stderr,none": 0.006889596071653633,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.27318762892695,
						"perplexity_stderr,none": 1.8804933516626878
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7420919852513099,
						"acc_stderr,none": 0.006094995125652961,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4347111171395124,
						"perplexity_stderr,none": 0.0670313420410076
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4438191344847662,
						"acc_stderr,none": 0.006921864695286307,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.527926925101546,
						"perplexity_stderr,none": 1.4951857881659434
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5532699398408694,
						"acc_stderr,none": 0.006926330307977034,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.59106113031019,
						"perplexity_stderr,none": 0.7939507400155539
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5212497574228605,
						"acc_stderr,none": 0.006959683808965927,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.753900851304437,
						"perplexity_stderr,none": 1.1348470483829822
					},
					"lambada_standard": {
						"acc,none": 0.688530952843004,
						"acc_stderr,none": 0.006451805320261258,
						"alias": " - lambada_standard",
						"perplexity,none": 4.259358249768517,
						"perplexity_stderr,none": 0.08999395316940734
					},
					"logiqa": {
						"acc,none": 0.22887864823348694,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.01769054268019077,
						"acc_stderr,none": 0.016478107276313263,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3439680957128614,
						"acc_stderr,none": 0.06340013035322513,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.041716541613545426,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3026315789473684,
						"acc_stderr,none": 0.03738520676119667,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411019,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03015113445777628,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.039420826399272135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958548,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.043364327079931785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.35319148936170214,
						"acc_stderr,none": 0.031245325202761926,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518752,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.32413793103448274,
						"acc_stderr,none": 0.03900432069185555,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.023068188848261128,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3903225806451613,
						"acc_stderr,none": 0.027751256636969573,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0317852971064275,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.41818181818181815,
						"acc_stderr,none": 0.03851716319398394,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.40404040404040403,
						"acc_stderr,none": 0.03496130972056127,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.46632124352331605,
						"acc_stderr,none": 0.03600244069867179,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.33589743589743587,
						"acc_stderr,none": 0.023946724741563973,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.025928876132766104,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.029597329730978093,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3926605504587156,
						"acc_stderr,none": 0.020937505161201093,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18055555555555555,
						"acc_stderr,none": 0.026232878971491666,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.47549019607843135,
						"acc_stderr,none": 0.035050931943487976,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4472573839662447,
						"acc_stderr,none": 0.03236564251614192,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.40458015267175573,
						"acc_stderr,none": 0.043046937953806645,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3321997874601488,
						"acc_stderr,none": 0.06276756560204373,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.32231404958677684,
						"acc_stderr,none": 0.042664163633521685,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.37962962962962965,
						"acc_stderr,none": 0.04691521224077742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3312883435582822,
						"acc_stderr,none": 0.03697983910025588,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.04157751539865629,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458934,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.032366121762202014,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.45977011494252873,
						"acc_stderr,none": 0.01782199409693354,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3092485549132948,
						"acc_stderr,none": 0.024883140570071755,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.34967320261437906,
						"acc_stderr,none": 0.027305308076274695,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.39137431606050854,
						"acc_stderr,none": 0.04697062313782774,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.42765273311897106,
						"acc_stderr,none": 0.028099240775809577,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3734567901234568,
						"acc_stderr,none": 0.02691500301138015,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2765957446808511,
						"acc_stderr,none": 0.02668456434046099,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3070404172099087,
						"acc_stderr,none": 0.01178095911451377,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.029520095697687765,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.019070985589687492,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661895,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.34285714285714286,
						"acc_stderr,none": 0.030387262919547728,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36886577835554113,
						"acc_stderr,none": 0.05619273710950392,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.46766169154228854,
						"acc_stderr,none": 0.035281314729336065,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2905169679670156,
						"acc_stderr,none": 0.05954930920623147,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.036965843170106004,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4152046783625731,
						"acc_stderr,none": 0.03779275945503201,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3760570555272542,
						"acc_stderr,none": 0.004889633216634707,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36574450772986167,
						"acc_stderr,none": 0.004857604775791876,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7377450980392157,
						"acc_stderr,none": 0.02180307601533612,
						"alias": " - mrpc",
						"f1,none": 0.834108527131783,
						"f1_stderr,none": 0.015816368969533805
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.019113573407202215,
						"exact_match_stderr,remove_whitespace": 0.0022792232467890635
					},
					"openbookqa": {
						"acc,none": 0.314,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.02208001481222814,
						"acc_stderr,none": 0.020776701920308997,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.010947964603728239,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3515,
						"acc_stderr,none": 0.010678524731685641,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3615,
						"acc_stderr,none": 0.010745538995515901,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.011166819105029988,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4645,
						"acc_stderr,none": 0.011154913314119564,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5065,
						"acc_stderr,none": 0.011182191006142296,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4507857142857143,
						"acc_stderr,none": 0.06106676645703439,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7747551686615887,
						"acc_norm,none": 0.7769314472252449,
						"acc_norm_stderr,none": 0.009713057213018522,
						"acc_stderr,none": 0.00974664347103215,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7458966934943291,
						"acc_norm,none": 0.6370611422748406,
						"acc_norm_stderr,none": 0.010081877708510817,
						"acc_stderr,none": 0.1390221629887055,
						"alias": "pythia",
						"bits_per_byte,none": 0.6363476745780474,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543890841221912,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4342308757822018,
						"perplexity_stderr,none": 0.06693174674753623,
						"word_perplexity,none": 10.576847482578392,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.499725425590335,
						"acc_stderr,none": 0.006765409531672773,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6631214444719268,
						"acc_stderr,none": 0.0023506432349534464,
						"alias": " - qqp",
						"f1,none": 0.6792123981346272,
						"f1_stderr,none": 0.0026025298414536005
					},
					"record": {
						"alias": "record",
						"em,none": 0.2709,
						"em_stderr,none": 0.004444467834222126,
						"f1,none": 0.28019190499782565,
						"f1_stderr,none": 0.004455108891970916
					},
					"rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.0296218322224172,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.954,
						"acc_norm,none": 0.932,
						"acc_norm_stderr,none": 0.007964887911291603,
						"acc_stderr,none": 0.006627814717380707,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9139908256880734,
						"acc_stderr,none": 0.009500232412777832,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.32941774242352867,
						"acc_stderr,none": 0.0016185082452014232,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.35128518971848227,
						"bleu_acc_stderr,none": 0.0167113581635444,
						"bleu_diff,none": -5.073626658121167,
						"bleu_diff_stderr,none": 0.9188952783988822,
						"bleu_max,none": 28.371862716234155,
						"bleu_max_stderr,none": 0.8138253180980356,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -6.590060734936701,
						"rouge1_diff_stderr,none": 1.043618044751592,
						"rouge1_max,none": 53.787449919973184,
						"rouge1_max_stderr,none": 0.870627841172565,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -8.197533631539986,
						"rouge2_diff_stderr,none": 1.235838968512957,
						"rouge2_max,none": 38.016601381205575,
						"rouge2_max_stderr,none": 1.045156582546898,
						"rougeL_acc,none": 0.3243574051407589,
						"rougeL_acc_stderr,none": 0.01638797677964794,
						"rougeL_diff,none": -6.812687503821623,
						"rougeL_diff_stderr,none": 1.0599226404594715,
						"rougeL_max,none": 50.97546516149105,
						"rougeL_max_stderr,none": 0.8953741526473659
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.35128518971848227,
						"bleu_acc_stderr,none": 0.0167113581635444,
						"bleu_diff,none": -5.073626658121167,
						"bleu_diff_stderr,none": 0.9188952783988822,
						"bleu_max,none": 28.371862716234155,
						"bleu_max_stderr,none": 0.8138253180980356,
						"rouge1_acc,none": 0.3243574051407589,
						"rouge1_acc_stderr,none": 0.01638797677964794,
						"rouge1_diff,none": -6.590060734936701,
						"rouge1_diff_stderr,none": 1.043618044751592,
						"rouge1_max,none": 53.787449919973184,
						"rouge1_max_stderr,none": 0.870627841172565,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384308,
						"rouge2_diff,none": -8.197533631539986,
						"rouge2_diff_stderr,none": 1.235838968512957,
						"rouge2_max,none": 38.016601381205575,
						"rouge2_max_stderr,none": 1.045156582546898,
						"rougeL_acc,none": 0.3243574051407589,
						"rougeL_acc_stderr,none": 0.01638797677964794,
						"rougeL_diff,none": -6.812687503821623,
						"rougeL_diff_stderr,none": 1.0599226404594715,
						"rougeL_max,none": 50.97546516149105,
						"rougeL_max_stderr,none": 0.8953741526473659
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156496,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40424552156676363,
						"acc_stderr,none": 0.014262003347342948,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6363476745780474,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543890841221912,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.576847482578392,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6748224151539068,
						"acc_stderr,none": 0.013165525471764358,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.049230010729780505,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6261818181818182,
						"acc_stderr,none": 0.0727927369933245,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.02183468586936921,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01992048320956607,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01987435483128749,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.021175665695209407,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745183,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.02022934632917752,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4416331994645248,
						"acc_stderr,none": 0.049526967860546896,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358225,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774337,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409704,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906761,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5377510040160642,
						"acc_stderr,none": 0.00999346636087279,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.010021956483068082,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4963855421686747,
						"acc_stderr,none": 0.01002181100096635,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42931726907630524,
						"acc_stderr,none": 0.009921425969589913,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542304,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40481927710843374,
						"acc_stderr,none": 0.009838809968433946,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583408,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.009995852282822376,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41606425702811245,
						"acc_stderr,none": 0.009879848511479767,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40642570281124496,
						"acc_stderr,none": 0.009844999034464199,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.36626506024096384,
						"acc_stderr,none": 0.009656930886014766,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6323927561518561,
						"acc_stderr,none": 0.060764631830965705,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.600926538716082,
						"acc_stderr,none": 0.01260226600518431,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7816015883520847,
						"acc_stderr,none": 0.010632343054700498,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.01166782538830548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5671740569159497,
						"acc_stderr,none": 0.012750474502985824,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6055592322964924,
						"acc_stderr,none": 0.012577106513936129,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6618133686300464,
						"acc_stderr,none": 0.012174678796437402,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5400397088021178,
						"acc_stderr,none": 0.01282580237008399,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840939,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.01277651858633279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.012655369030750353,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6538716082064858,
						"acc_stderr,none": 0.012242676637496357,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8042256686895932,
						"acc_stderr,none": 0.035867365106512665,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8589247311827957,
						"acc_stderr,none": 0.00722079366580279,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.049425892997830935,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353834,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7642585551330798,
						"acc_stderr,none": 0.026223308206222526,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.026786851659200927,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7837301587301587,
						"acc_stderr,none": 0.01835681232408577,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-7B-Base-With-Mask_pth"
	},
	"./rwkv-x-dev/R4-no-shuffle-rwkv-53_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6152762119503946,
						"acc_norm,none": 0.6017474633596392,
						"acc_norm_stderr,none": 0.0911100428791231,
						"acc_stderr,none": 0.10937162008709193,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3528125,
						"acc_stderr,none": 0.0158866115093717,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8181641791044776,
						"acc_stderr,none": 0.17053981957568717,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3055603522707651,
						"acc_norm,none": 0.3055603522707651,
						"acc_norm_stderr,none": 0.05522800971226768,
						"acc_stderr,none": 0.05522800971226768,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5359695092901382,
						"acc_stderr,none": 0.013956822617193405,
						"alias": "glue",
						"f1,none": 0.6621513471510841,
						"f1_stderr,none": 0.00027891244054560935,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.69241218707549,
						"acc_stderr,none": 0.023590498883457767,
						"alias": "lambada",
						"perplexity,none": 4.1259759193841825,
						"perplexity_stderr,none": 0.38072118762900736
					},
					"lambada_multilingual": {
						"acc,none": 0.5300213467882787,
						"acc_stderr,none": 0.08478081425602138,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.83675920800228,
						"perplexity_stderr,none": 8.492714323411757
					},
					"mmlu": {
						"acc,none": 0.3279447372169207,
						"acc_stderr,none": 0.06175547312822675,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31179596174282675,
						"acc_stderr,none": 0.05700987817666786,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.36208561313163823,
						"acc_stderr,none": 0.05565801474154758,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35359116022099446,
						"acc_stderr,none": 0.056420489634274644,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.29337139232477005,
						"acc_stderr,none": 0.06530675236787743,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48392857142857143,
						"acc_stderr,none": 0.05852535930364702,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7273024128209116,
						"acc_norm,none": 0.6064898789054679,
						"acc_norm_stderr,none": 0.01038998442806782,
						"acc_stderr,none": 0.15885460387850886,
						"alias": "pythia",
						"bits_per_byte,none": 0.635164924156506,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553115286890367,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.386876279975169,
						"perplexity_stderr,none": 0.06700819056449496,
						"word_perplexity,none": 10.530580627016594,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3224053075847113,
						"acc_stderr,none": 0.0015813739363792728,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -7.196915376061092,
						"bleu_diff_stderr,none": 0.8684774958603122,
						"bleu_max,none": 27.56824932914943,
						"bleu_max_stderr,none": 0.812864783781585,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -9.368486370412562,
						"rouge1_diff_stderr,none": 0.9303629377150874,
						"rouge1_max,none": 52.794363146604496,
						"rouge1_max_stderr,none": 0.8660300659080384,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834547,
						"rouge2_diff,none": -11.321038448116774,
						"rouge2_diff_stderr,none": 1.1387251546987889,
						"rouge2_max,none": 37.05928939989468,
						"rouge2_max_stderr,none": 1.0221024564757786,
						"rougeL_acc,none": 0.2913096695226438,
						"rougeL_acc_stderr,none": 0.01590598704818483,
						"rougeL_diff,none": -9.618094158264453,
						"rougeL_diff_stderr,none": 0.9474689220561376,
						"rougeL_max,none": 49.98275074366403,
						"rougeL_max_stderr,none": 0.8840347746825815
					},
					"xcopa": {
						"acc,none": 0.6185454545454545,
						"acc_stderr,none": 0.07040035063257306,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.05291016463801477,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6294446784188676,
						"acc_stderr,none": 0.06326447002592103,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.814340301191279,
						"acc_stderr,none": 0.0365225565825825,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6152762119503946,
						"acc_norm,none": 0.6017474633596392,
						"acc_norm_stderr,none": 0.0911100428791231,
						"acc_stderr,none": 0.10937162008709193,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3528125,
						"acc_stderr,none": 0.0158866115093717,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405752,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.015080663991563098,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.013728421539454876,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3839590443686007,
						"acc_norm,none": 0.40955631399317405,
						"acc_norm_stderr,none": 0.01437035863247244,
						"acc_stderr,none": 0.01421244498065189,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7293771043771043,
						"acc_norm,none": 0.6965488215488216,
						"acc_norm_stderr,none": 0.009433837434252274,
						"acc_stderr,none": 0.009116466166403828,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8181641791044776,
						"acc_stderr,none": 0.17053981957568717,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523734,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844883,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578009,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559934,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763935,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.015733516566347826,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307811,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378222,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036216,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584938,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033835,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248114,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315158,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306489,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.014746404865473487,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801103,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750636,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406729,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108652,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403626,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.237,
						"acc_stderr,none": 0.013454070462577955,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787745,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386703,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858917,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.013644675781314135,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333377,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.01005510343582333,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280301,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138757007,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.342,
						"acc_stderr,none": 0.015008706182121731,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154624,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.0156403203170401,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475284,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766275,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425665,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406727,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346939,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491111,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.015811198373114878,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118775,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946088,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767593,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.01414298497574067,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.015782683329937618,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651525,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.0059721576223896195,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.015667944488173498,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110865,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713327,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.01225945734093858,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632168,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474918,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919306,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891024,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.304,
						"acc_stderr,none": 0.014553205687950444,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3055603522707651,
						"acc_norm,none": 0.3055603522707651,
						"acc_norm_stderr,none": 0.05522800971226768,
						"acc_stderr,none": 0.05522800971226768,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3253588516746411,
						"acc_norm,none": 0.3253588516746411,
						"acc_norm_stderr,none": 0.03248523846063362,
						"acc_stderr,none": 0.03248523846063362,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3282442748091603,
						"acc_norm,none": 0.3282442748091603,
						"acc_norm_stderr,none": 0.04118438565806298,
						"acc_stderr,none": 0.04118438565806298,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.04112975875177067,
						"acc_stderr,none": 0.04112975875177067,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.04619693596622581,
						"acc_stderr,none": 0.04619693596622581,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.025903936365684936,
						"acc_stderr,none": 0.025903936365684936,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.0327028718148208,
						"acc_stderr,none": 0.0327028718148208,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3407821229050279,
						"acc_norm,none": 0.3407821229050279,
						"acc_norm_stderr,none": 0.0355257200397793,
						"acc_stderr,none": 0.0355257200397793,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.270042194092827,
						"acc_norm,none": 0.270042194092827,
						"acc_norm_stderr,none": 0.028900721906293426,
						"acc_stderr,none": 0.028900721906293426,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4056603773584906,
						"acc_norm,none": 0.4056603773584906,
						"acc_norm_stderr,none": 0.04791858528000114,
						"acc_stderr,none": 0.04791858528000114,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.026718814072967542,
						"acc_stderr,none": 0.026718814072967542,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.0327028718148208,
						"acc_stderr,none": 0.0327028718148208,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.29239766081871343,
						"acc_norm,none": 0.29239766081871343,
						"acc_norm_stderr,none": 0.03488647713457922,
						"acc_stderr,none": 0.03488647713457922,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.29931972789115646,
						"acc_norm,none": 0.29931972789115646,
						"acc_norm_stderr,none": 0.0379010453091039,
						"acc_stderr,none": 0.0379010453091039,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.03815152004368298,
						"acc_stderr,none": 0.03815152004368298,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.03642914578292405,
						"acc_stderr,none": 0.03642914578292405,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.029179948694667317,
						"acc_stderr,none": 0.029179948694667317,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713546,
						"acc_stderr,none": 0.03191178226713546,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4327731092436975,
						"acc_norm,none": 0.4327731092436975,
						"acc_norm_stderr,none": 0.032183581077426124,
						"acc_stderr,none": 0.032183581077426124,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.027641785707241334,
						"acc_stderr,none": 0.027641785707241334,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.32592592592592595,
						"acc_norm,none": 0.32592592592592595,
						"acc_norm_stderr,none": 0.040491220417025055,
						"acc_stderr,none": 0.040491220417025055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.03897077881510411,
						"acc_stderr,none": 0.03897077881510411,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3068181818181818,
						"acc_norm,none": 0.3068181818181818,
						"acc_norm_stderr,none": 0.03486142240553238,
						"acc_stderr,none": 0.03486142240553238,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698768,
						"acc_stderr,none": 0.03724517629698768,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3305084745762712,
						"acc_norm,none": 0.3305084745762712,
						"acc_norm_stderr,none": 0.043488147791922734,
						"acc_stderr,none": 0.043488147791922734,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.034450002891734596,
						"acc_stderr,none": 0.034450002891734596,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.04134913018303316,
						"acc_stderr,none": 0.04134913018303316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.31351351351351353,
						"acc_norm,none": 0.31351351351351353,
						"acc_norm_stderr,none": 0.03420071750756409,
						"acc_stderr,none": 0.03420071750756409,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.36046511627906974,
						"acc_norm,none": 0.36046511627906974,
						"acc_norm_stderr,none": 0.036716872822364986,
						"acc_stderr,none": 0.036716872822364986,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2895377128953771,
						"acc_norm,none": 0.2895377128953771,
						"acc_norm_stderr,none": 0.022399130302514076,
						"acc_stderr,none": 0.022399130302514076,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4158878504672897,
						"acc_norm,none": 0.4158878504672897,
						"acc_norm_stderr,none": 0.03377119548676909,
						"acc_stderr,none": 0.03377119548676909,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.03930879526823993,
						"acc_stderr,none": 0.03930879526823993,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630124,
						"acc_stderr,none": 0.03260773253630124,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.35555555555555557,
						"acc_norm,none": 0.35555555555555557,
						"acc_norm_stderr,none": 0.0357783213964892,
						"acc_stderr,none": 0.0357783213964892,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.034380708208626445,
						"acc_stderr,none": 0.034380708208626445,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3310344827586207,
						"acc_norm,none": 0.3310344827586207,
						"acc_norm_stderr,none": 0.039215453124671215,
						"acc_stderr,none": 0.039215453124671215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.033822819375172945,
						"acc_stderr,none": 0.033822819375172945,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.030971033440870904,
						"acc_stderr,none": 0.030971033440870904,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.34913793103448276,
						"acc_norm,none": 0.34913793103448276,
						"acc_norm_stderr,none": 0.03136440175753369,
						"acc_stderr,none": 0.03136440175753369,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.33185840707964603,
						"acc_norm,none": 0.33185840707964603,
						"acc_norm_stderr,none": 0.031392030462821255,
						"acc_stderr,none": 0.031392030462821255,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091709,
						"acc_stderr,none": 0.03588624800091709,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288122,
						"acc_stderr,none": 0.03541479614288122,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.036968263701746516,
						"acc_stderr,none": 0.036968263701746516,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5359695092901382,
						"acc_stderr,none": 0.013956822617193405,
						"alias": "glue",
						"f1,none": 0.6621513471510841,
						"f1_stderr,none": 0.00027891244054560935,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5231029675363473,
						"acc_norm,none": 0.7055367456681936,
						"acc_norm_stderr,none": 0.004548695749620953,
						"acc_stderr,none": 0.004984452002563931,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.69241218707549,
						"acc_stderr,none": 0.023590498883457767,
						"alias": "lambada",
						"perplexity,none": 4.1259759193841825,
						"perplexity_stderr,none": 0.38072118762900736
					},
					"lambada_multilingual": {
						"acc,none": 0.5300213467882787,
						"acc_stderr,none": 0.08478081425602138,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.83675920800228,
						"perplexity_stderr,none": 8.492714323411757
					},
					"lambada_openai": {
						"acc,none": 0.736852319037454,
						"acc_stderr,none": 0.006134823516779081,
						"alias": " - lambada_openai",
						"perplexity,none": 3.386876279975169,
						"perplexity_stderr,none": 0.06700819056449496
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4213079759363478,
						"acc_stderr,none": 0.006879163975434955,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.635165233099556,
						"perplexity_stderr,none": 1.9718136251503395
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7372404424607025,
						"acc_stderr,none": 0.00613191193948416,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3864888327778866,
						"perplexity_stderr,none": 0.06699625030951403
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4457597516010091,
						"acc_stderr,none": 0.006924868480005585,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.75606705944472,
						"perplexity_stderr,none": 1.455910823450434
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5418202988550359,
						"acc_stderr,none": 0.0069415687750082455,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.274790452312086,
						"perplexity_stderr,none": 0.8368975353322086
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5039782650882981,
						"acc_stderr,none": 0.006965757158314169,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.131284462377145,
						"perplexity_stderr,none": 1.2233906124914016
					},
					"lambada_standard": {
						"acc,none": 0.6470017465554047,
						"acc_stderr,none": 0.006658111712346053,
						"alias": " - lambada_standard",
						"perplexity,none": 4.865405934278854,
						"perplexity_stderr,none": 0.10957154295214362
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825197,
						"acc_stderr,none": 0.016815676206479523,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3279447372169207,
						"acc_stderr,none": 0.06175547312822675,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653695,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3355263157894737,
						"acc_stderr,none": 0.03842498559395268,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.029773082713319875,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.038760854559127644,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036846,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.034564257450869995,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.0433643270799318,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3103448275862069,
						"acc_stderr,none": 0.03855289616378949,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23015873015873015,
						"acc_stderr,none": 0.02167921966369314,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.040735243221471255,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.432258064516129,
						"acc_stderr,none": 0.028181739720019413,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.031270907132976984,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.03872592983524753,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.03502975799413007,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.43005181347150256,
						"acc_stderr,none": 0.035729543331448094,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31794871794871793,
						"acc_stderr,none": 0.02361088430892786,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712177,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3067226890756303,
						"acc_stderr,none": 0.029953823891887048,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2913907284768212,
						"acc_stderr,none": 0.037101857261199946,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3963302752293578,
						"acc_stderr,none": 0.020971469947900532,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2175925925925926,
						"acc_stderr,none": 0.028139689444859676,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4264705882352941,
						"acc_stderr,none": 0.03471157907953424,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3459915611814346,
						"acc_stderr,none": 0.030964810588786716,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.37668161434977576,
						"acc_stderr,none": 0.03252113489929188,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.42748091603053434,
						"acc_stderr,none": 0.04338920305792401,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31179596174282675,
						"acc_stderr,none": 0.05700987817666786,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.043207678075366705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591311,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3496932515337423,
						"acc_stderr,none": 0.037466683254700206,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4174757281553398,
						"acc_stderr,none": 0.048828405482122375,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4358974358974359,
						"acc_stderr,none": 0.032485775115783995,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4482758620689655,
						"acc_stderr,none": 0.01778403453499244,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.024547617794803838,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.02609016250427906,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.36208561313163823,
						"acc_stderr,none": 0.05565801474154758,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.42765273311897106,
						"acc_stderr,none": 0.028099240775809563,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3487654320987654,
						"acc_stderr,none": 0.026517597724465013,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2777053455019557,
						"acc_stderr,none": 0.011438741422769565,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.028418208619406794,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.32679738562091504,
						"acc_stderr,none": 0.01897542792050722,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.42727272727272725,
						"acc_stderr,none": 0.04738198703545483,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24081632653061225,
						"acc_stderr,none": 0.02737294220178816,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35359116022099446,
						"acc_stderr,none": 0.056420489634274644,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.42786069651741293,
						"acc_stderr,none": 0.03498541988407795,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.29337139232477005,
						"acc_stderr,none": 0.06530675236787743,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683228,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4093567251461988,
						"acc_stderr,none": 0.03771283107626544,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.32694854814060115,
						"acc_stderr,none": 0.004735227100018153,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.32811228641171686,
						"acc_stderr,none": 0.004735444470368719,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7549019607843137,
						"acc_stderr,none": 0.0213215115784726,
						"alias": " - mrpc",
						"f1,none": 0.8402555910543131,
						"f1_stderr,none": 0.01577775456725611
					},
					"openbookqa": {
						"acc,none": 0.29,
						"acc_norm,none": 0.42,
						"acc_norm_stderr,none": 0.02209471322976178,
						"acc_stderr,none": 0.02031317923174518,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.011112774040420284,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.010722662344020561,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.424,
						"acc_stderr,none": 0.011053193499766087,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.011127079848413747,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.539,
						"acc_stderr,none": 0.011149065020234336,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5245,
						"acc_stderr,none": 0.011169702598013186,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48392857142857143,
						"acc_stderr,none": 0.05852535930364702,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7633297062023939,
						"acc_norm,none": 0.779107725788901,
						"acc_norm_stderr,none": 0.009679088048842219,
						"acc_stderr,none": 0.009916841655042809,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7273024128209116,
						"acc_norm,none": 0.6064898789054679,
						"acc_norm_stderr,none": 0.01038998442806782,
						"acc_stderr,none": 0.15885460387850886,
						"alias": "pythia",
						"bits_per_byte,none": 0.635164924156506,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553115286890367,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.386876279975169,
						"perplexity_stderr,none": 0.06700819056449496,
						"word_perplexity,none": 10.530580627016594,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49899322716456157,
						"acc_stderr,none": 0.006765396837036612,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6313875834776156,
						"acc_stderr,none": 0.002399310744871023,
						"alias": " - qqp",
						"f1,none": 0.6606089590307668,
						"f1_stderr,none": 0.0026123167597888607
					},
					"record": {
						"alias": "record",
						"em,none": 0.267,
						"em_stderr,none": 0.004424144810664585,
						"f1,none": 0.27631047641932965,
						"f1_stderr,none": 0.004434293873068706
					},
					"rte": {
						"acc,none": 0.6425992779783394,
						"acc_stderr,none": 0.028846510722612004,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.941,
						"acc_norm,none": 0.912,
						"acc_norm_stderr,none": 0.008963053962592076,
						"acc_stderr,none": 0.007454835650406727,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9071100917431193,
						"acc_stderr,none": 0.009835698073987974,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3224053075847113,
						"acc_stderr,none": 0.0015813739363792728,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -7.196915376061092,
						"bleu_diff_stderr,none": 0.8684774958603122,
						"bleu_max,none": 27.56824932914943,
						"bleu_max_stderr,none": 0.812864783781585,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -9.368486370412562,
						"rouge1_diff_stderr,none": 0.9303629377150874,
						"rouge1_max,none": 52.794363146604496,
						"rouge1_max_stderr,none": 0.8660300659080384,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834547,
						"rouge2_diff,none": -11.321038448116774,
						"rouge2_diff_stderr,none": 1.1387251546987889,
						"rouge2_max,none": 37.05928939989468,
						"rouge2_max_stderr,none": 1.0221024564757786,
						"rougeL_acc,none": 0.2913096695226438,
						"rougeL_acc_stderr,none": 0.01590598704818483,
						"rougeL_diff,none": -9.618094158264453,
						"rougeL_diff_stderr,none": 0.9474689220561376,
						"rougeL_max,none": 49.98275074366403,
						"rougeL_max_stderr,none": 0.8840347746825815
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960505,
						"bleu_diff,none": -7.196915376061092,
						"bleu_diff_stderr,none": 0.8684774958603122,
						"bleu_max,none": 27.56824932914943,
						"bleu_max_stderr,none": 0.812864783781585,
						"rouge1_acc,none": 0.2876376988984088,
						"rouge1_acc_stderr,none": 0.01584631510139481,
						"rouge1_diff,none": -9.368486370412562,
						"rouge1_diff_stderr,none": 0.9303629377150874,
						"rouge1_max,none": 52.794363146604496,
						"rouge1_max_stderr,none": 0.8660300659080384,
						"rouge2_acc,none": 0.26805385556915545,
						"rouge2_acc_stderr,none": 0.015506204722834547,
						"rouge2_diff,none": -11.321038448116774,
						"rouge2_diff_stderr,none": 1.1387251546987889,
						"rouge2_max,none": 37.05928939989468,
						"rouge2_max_stderr,none": 1.0221024564757786,
						"rougeL_acc,none": 0.2913096695226438,
						"rougeL_acc_stderr,none": 0.01590598704818483,
						"rougeL_diff,none": -9.618094158264453,
						"rougeL_diff_stderr,none": 0.9474689220561376,
						"rougeL_max,none": 49.98275074366403,
						"rougeL_max_stderr,none": 0.8840347746825815
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2484700122399021,
						"acc_stderr,none": 0.015127427096520674,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39634060292952056,
						"acc_stderr,none": 0.014116773876958054,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.635164924156506,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553115286890367,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.530580627016594,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6803472770323599,
						"acc_stderr,none": 0.013106528517665137,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.05975550263548289,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6185454545454545,
						"acc_stderr,none": 0.07040035063257306,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928866,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.02153917063731769,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.02018670369357085,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.02088234048876181,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.05291016463801477,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47630522088353416,
						"acc_stderr,none": 0.010010812905412067,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071305,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3779116465863454,
						"acc_stderr,none": 0.00971871228122746,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5329317269076306,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.01002134544404757,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5056224899598394,
						"acc_stderr,none": 0.010021439203777325,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42931726907630524,
						"acc_stderr,none": 0.009921425969589916,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.48714859437751,
						"acc_stderr,none": 0.010018761856718258,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40120481927710844,
						"acc_stderr,none": 0.009824484469158963,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40321285140562246,
						"acc_stderr,none": 0.009832511560868071,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44056224899598395,
						"acc_stderr,none": 0.009951008027814038,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.00984346200738424,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.009819585875881304,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.00952295446980604,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6294446784188676,
						"acc_stderr,none": 0.06326447002592103,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607803,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7842488418266049,
						"acc_stderr,none": 0.01058558922710119,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7127729980145598,
						"acc_stderr,none": 0.011643935161147862,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5645268034414295,
						"acc_stderr,none": 0.012759525506489233,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133262,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6724023825281271,
						"acc_stderr,none": 0.01207804144298347,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5459960291197882,
						"acc_stderr,none": 0.012812565368728929,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840939,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219837,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823772,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6346790205162144,
						"acc_stderr,none": 0.012391557728373985,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.814340301191279,
						"acc_stderr,none": 0.0365225565825825,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8718279569892473,
						"acc_stderr,none": 0.006934162057729842,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7539103232533889,
						"acc_stderr,none": 0.013916300191059498,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7837301587301587,
						"acc_stderr,none": 0.01835681232408577,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-no-shuffle-rwkv-53_pth"
	},
	"./rwkv-x-dev/R4-with-shuffle-rwkv-53_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6223224351747464,
						"acc_norm,none": 0.6144306651634723,
						"acc_norm_stderr,none": 0.0890629236550453,
						"acc_stderr,none": 0.10867478870091458,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3728125,
						"acc_stderr,none": 0.01714025473086667,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8164925373134329,
						"acc_stderr,none": 0.1605693292257459,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.30132964945605256,
						"acc_norm,none": 0.30132964945605256,
						"acc_norm_stderr,none": 0.05579182517508882,
						"acc_stderr,none": 0.05579182517508882,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6406175559790377,
						"acc_stderr,none": 0.0029016992309548024,
						"alias": "glue",
						"f1,none": 0.6899437499022025,
						"f1_stderr,none": 0.00021194546421286417,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.6952260818940423,
						"acc_stderr,none": 0.023166267931375648,
						"alias": "lambada",
						"perplexity,none": 4.069955548346865,
						"perplexity_stderr,none": 0.34968827347729686
					},
					"lambada_multilingual": {
						"acc,none": 0.5337861439937901,
						"acc_stderr,none": 0.08440507077186947,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.686206083021432,
						"perplexity_stderr,none": 8.430355695718372
					},
					"mmlu": {
						"acc,none": 0.3561458481697764,
						"acc_stderr,none": 0.06598554225645832,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.328586609989373,
						"acc_stderr,none": 0.06083077073352791,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.39652397811393625,
						"acc_stderr,none": 0.058601563774905024,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4007149821254469,
						"acc_stderr,none": 0.059349912838627235,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.31398667935299723,
						"acc_stderr,none": 0.06318036084319219,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4813571428571429,
						"acc_stderr,none": 0.0534526449360939,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7308011475391978,
						"acc_norm,none": 0.6189468351657585,
						"acc_norm_stderr,none": 0.010003645220049909,
						"acc_stderr,none": 0.15142216889151972,
						"alias": "pythia",
						"bits_per_byte,none": 0.6350900240825521,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530346562463961,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3929482862948435,
						"perplexity_stderr,none": 0.06685882993754429,
						"word_perplexity,none": 10.527657507813059,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.32656045167970366,
						"acc_stderr,none": 0.0015130713379683325,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -7.433818428450569,
						"bleu_diff_stderr,none": 0.863288167943516,
						"bleu_max,none": 26.69350922131407,
						"bleu_max_stderr,none": 0.797949354565667,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.685337443876643,
						"rouge1_diff_stderr,none": 0.9190145342369646,
						"rouge1_max,none": 51.96058575416866,
						"rouge1_max_stderr,none": 0.8575985110026056,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237014,
						"rouge2_diff,none": -11.613716960826352,
						"rouge2_diff_stderr,none": 1.1242858242883977,
						"rouge2_max,none": 35.95215198255373,
						"rouge2_max_stderr,none": 1.0106022291829948,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589667,
						"rougeL_diff,none": -9.854204504743402,
						"rougeL_diff_stderr,none": 0.9358254993395869,
						"rougeL_max,none": 49.307668496885384,
						"rougeL_max_stderr,none": 0.8739261865666953
					},
					"xcopa": {
						"acc,none": 0.6214545454545455,
						"acc_stderr,none": 0.07027373008728728,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43684069611780457,
						"acc_stderr,none": 0.05199253253663402,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.630226821490885,
						"acc_stderr,none": 0.0634364403908616,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8152393796358732,
						"acc_stderr,none": 0.036391676441638335,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6223224351747464,
						"acc_norm,none": 0.6144306651634723,
						"acc_norm_stderr,none": 0.0890629236550453,
						"acc_stderr,none": 0.10867478870091458,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3728125,
						"acc_stderr,none": 0.01714025473086667,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.391,
						"acc_stderr,none": 0.01543882629468179,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.369,
						"acc_stderr,none": 0.015266698139154619,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36083333333333334,
						"acc_stderr,none": 0.013869180252444862,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3924914675767918,
						"acc_norm,none": 0.42662116040955633,
						"acc_norm_stderr,none": 0.014453185592920293,
						"acc_stderr,none": 0.014269634635670696,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7356902356902357,
						"acc_norm,none": 0.7070707070707071,
						"acc_norm_stderr,none": 0.009338583737393602,
						"acc_stderr,none": 0.009048410451863014,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8164925373134329,
						"acc_stderr,none": 0.1605693292257459,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118588,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578009,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264604,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745923,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.01310617304066178,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.01579789775804277,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655127,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357791,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998145,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697587,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140924,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323497,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178347,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234397,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.014987482264363937,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361428,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.0044294039801783475,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.015663503610155283,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248104,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183571,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.015594460144140596,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01404625563263392,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697582,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151112,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568198,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234396,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.015275252316519362,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.015325105508898125,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.015716169953204108,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737234,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.015757928553979183,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340978,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.00927691010310331,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269192815,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448843,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491108,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.015803979428161946,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426128,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936708,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910611,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.01420569610409148,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.01571250721186421,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756997,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280311,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.015589035185604632,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855748,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904619,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113114,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792948,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286427,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274701,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099227,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.015275252316519362,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.314,
						"acc_stderr,none": 0.014683991951087974,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.30132964945605256,
						"acc_norm,none": 0.30132964945605256,
						"acc_norm_stderr,none": 0.05579182517508882,
						"acc_stderr,none": 0.05579182517508882,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.33727810650887574,
						"acc_norm,none": 0.33727810650887574,
						"acc_norm_stderr,none": 0.03647582250277504,
						"acc_stderr,none": 0.03647582250277504,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.03223012819451555,
						"acc_stderr,none": 0.03223012819451555,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.2375,
						"acc_norm_stderr,none": 0.03374839851779223,
						"acc_stderr,none": 0.03374839851779223,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.04010358942462203,
						"acc_stderr,none": 0.04010358942462203,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.0402637721078731,
						"acc_stderr,none": 0.0402637721078731,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30959752321981426,
						"acc_norm,none": 0.30959752321981426,
						"acc_norm_stderr,none": 0.025764515105490104,
						"acc_stderr,none": 0.025764515105490104,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115071,
						"acc_stderr,none": 0.03198001660115071,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3240223463687151,
						"acc_norm,none": 0.3240223463687151,
						"acc_norm_stderr,none": 0.03507871288800093,
						"acc_stderr,none": 0.03507871288800093,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.02931281415395592,
						"acc_stderr,none": 0.02931281415395592,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.04674660221110773,
						"acc_stderr,none": 0.04674660221110773,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.42452830188679247,
						"acc_norm,none": 0.42452830188679247,
						"acc_norm_stderr,none": 0.0482359303724347,
						"acc_stderr,none": 0.0482359303724347,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.04384295586918882,
						"acc_stderr,none": 0.04384295586918882,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2454212454212454,
						"acc_norm,none": 0.2454212454212454,
						"acc_norm_stderr,none": 0.02609299388422865,
						"acc_stderr,none": 0.02609299388422865,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.031822318676475524,
						"acc_stderr,none": 0.031822318676475524,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03615507630310935,
						"acc_stderr,none": 0.03615507630310935,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.037387423042158106,
						"acc_stderr,none": 0.037387423042158106,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.039347351125471115,
						"acc_stderr,none": 0.039347351125471115,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.03856271073542805,
						"acc_stderr,none": 0.03856271073542805,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3558282208588957,
						"acc_norm,none": 0.3558282208588957,
						"acc_norm_stderr,none": 0.03761521380046734,
						"acc_stderr,none": 0.03761521380046734,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.028858905984721215,
						"acc_stderr,none": 0.028858905984721215,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.37815126050420167,
						"acc_norm,none": 0.37815126050420167,
						"acc_norm_stderr,none": 0.03149930577784906,
						"acc_stderr,none": 0.03149930577784906,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21304347826086956,
						"acc_norm,none": 0.21304347826086956,
						"acc_norm_stderr,none": 0.027057754389936198,
						"acc_stderr,none": 0.027057754389936198,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501117,
						"acc_stderr,none": 0.03944624162501117,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3776223776223776,
						"acc_norm,none": 0.3776223776223776,
						"acc_norm_stderr,none": 0.040682878492098076,
						"acc_stderr,none": 0.040682878492098076,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03448901746724545,
						"acc_stderr,none": 0.03448901746724545,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.03220965704514523,
						"acc_stderr,none": 0.03220965704514523,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.037036671945524846,
						"acc_stderr,none": 0.037036671945524846,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2796610169491525,
						"acc_norm,none": 0.2796610169491525,
						"acc_norm_stderr,none": 0.04149459161011112,
						"acc_stderr,none": 0.04149459161011112,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.03831305140884601,
						"acc_stderr,none": 0.03831305140884601,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.037068604626235575,
						"acc_stderr,none": 0.037068604626235575,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3488372093023256,
						"acc_norm,none": 0.3488372093023256,
						"acc_norm_stderr,none": 0.036446693486947866,
						"acc_stderr,none": 0.036446693486947866,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.033921125520669684,
						"acc_stderr,none": 0.033921125520669684,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2032520325203252,
						"acc_norm,none": 0.2032520325203252,
						"acc_norm_stderr,none": 0.036433258517490706,
						"acc_stderr,none": 0.036433258517490706,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3524590163934426,
						"acc_norm,none": 0.3524590163934426,
						"acc_norm_stderr,none": 0.0434305428342706,
						"acc_stderr,none": 0.0434305428342706,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3380952380952381,
						"acc_norm,none": 0.3380952380952381,
						"acc_norm_stderr,none": 0.032722323714044405,
						"acc_stderr,none": 0.032722323714044405,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.034768900963930385,
						"acc_stderr,none": 0.034768900963930385,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.03394921616447879,
						"acc_stderr,none": 0.03394921616447879,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.041678081808441535,
						"acc_stderr,none": 0.041678081808441535,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3619047619047619,
						"acc_norm,none": 0.3619047619047619,
						"acc_norm_stderr,none": 0.04712194748483612,
						"acc_stderr,none": 0.04712194748483612,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.30857142857142855,
						"acc_norm,none": 0.30857142857142855,
						"acc_norm_stderr,none": 0.035016835199101176,
						"acc_stderr,none": 0.035016835199101176,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2843601895734597,
						"acc_norm,none": 0.2843601895734597,
						"acc_norm_stderr,none": 0.031129489323148664,
						"acc_stderr,none": 0.031129489323148664,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3146551724137931,
						"acc_norm,none": 0.3146551724137931,
						"acc_norm_stderr,none": 0.030553855290356806,
						"acc_stderr,none": 0.030553855290356806,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03568272877241247,
						"acc_stderr,none": 0.03568272877241247,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.03972552884785138,
						"acc_stderr,none": 0.03972552884785138,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3393939393939394,
						"acc_norm,none": 0.3393939393939394,
						"acc_norm_stderr,none": 0.03697442205031596,
						"acc_stderr,none": 0.03697442205031596,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3167701863354037,
						"acc_norm,none": 0.3167701863354037,
						"acc_norm_stderr,none": 0.036778631311574536,
						"acc_stderr,none": 0.036778631311574536,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6406175559790377,
						"acc_stderr,none": 0.0029016992309548024,
						"alias": "glue",
						"f1,none": 0.6899437499022025,
						"f1_stderr,none": 0.00021194546421286417,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.528281218880701,
						"acc_norm,none": 0.7092212706632145,
						"acc_norm_stderr,none": 0.00453193539150701,
						"acc_stderr,none": 0.004981793089848263,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6952260818940423,
						"acc_stderr,none": 0.023166267931375648,
						"alias": "lambada",
						"perplexity,none": 4.069955548346865,
						"perplexity_stderr,none": 0.34968827347729686
					},
					"lambada_multilingual": {
						"acc,none": 0.5337861439937901,
						"acc_stderr,none": 0.08440507077186947,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.686206083021432,
						"perplexity_stderr,none": 8.430355695718372
					},
					"lambada_openai": {
						"acc,none": 0.7403454298466913,
						"acc_stderr,none": 0.006108397042730503,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3929482862948435,
						"perplexity_stderr,none": 0.06685882993754429
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4261595187269552,
						"acc_stderr,none": 0.006889596071653635,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.343242026968326,
						"perplexity_stderr,none": 1.9594183241756988
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7399573064234427,
						"acc_stderr,none": 0.00611135809828806,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3926212866977905,
						"perplexity_stderr,none": 0.06683172394907767
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4473122452940035,
						"acc_stderr,none": 0.006927194593957516,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.716202452263484,
						"perplexity_stderr,none": 1.4646460714187488
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5422084222782845,
						"acc_stderr,none": 0.006941112792281866,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.19918847098422,
						"perplexity_stderr,none": 0.8377879595350564
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5132932272462644,
						"acc_stderr,none": 0.006963515307693612,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.779776178193337,
						"perplexity_stderr,none": 1.2119263291163993
					},
					"lambada_standard": {
						"acc,none": 0.6506889190762662,
						"acc_stderr,none": 0.006642093114734592,
						"alias": " - lambada_standard",
						"perplexity,none": 4.747302704075848,
						"perplexity_stderr,none": 0.10332767170869595
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.016742766935101447,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3561458481697764,
						"acc_stderr,none": 0.06598554225645832,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.14,
						"acc_stderr,none": 0.0348735088019777,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.35526315789473684,
						"acc_stderr,none": 0.03894734487013316,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.030151134457776285,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.03714325906302065,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.04533838195929778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3446808510638298,
						"acc_stderr,none": 0.03106898596312215,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.0409698513984367,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.03878352372138623,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.023456037383982015,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4032258064516129,
						"acc_stderr,none": 0.027906150826041143,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4121212121212121,
						"acc_stderr,none": 0.038435669935887165,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.03547601494006936,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.49740932642487046,
						"acc_stderr,none": 0.03608390745384488,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4076923076923077,
						"acc_stderr,none": 0.024915243985987844,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.02730914058823018,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.031041941304059288,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.44770642201834865,
						"acc_stderr,none": 0.021319754962425455,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03179876342176853,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.03492406104163613,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4050632911392405,
						"acc_stderr,none": 0.03195514741370673,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34080717488789236,
						"acc_stderr,none": 0.0318114974705536,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4961832061068702,
						"acc_stderr,none": 0.043851623256015534,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.328586609989373,
						"acc_stderr,none": 0.06083077073352791,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2644628099173554,
						"acc_stderr,none": 0.04026187527591207,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3619631901840491,
						"acc_stderr,none": 0.03775700729141441,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3392857142857143,
						"acc_stderr,none": 0.04493949068613539,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.048657775704107675,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.47435897435897434,
						"acc_stderr,none": 0.03271298896811159,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4789272030651341,
						"acc_stderr,none": 0.017864076786212896,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3063583815028902,
						"acc_stderr,none": 0.024818350129436603,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3464052287581699,
						"acc_stderr,none": 0.027245613047215355,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.39652397811393625,
						"acc_stderr,none": 0.058601563774905024,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4437299035369775,
						"acc_stderr,none": 0.02821768355665231,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.404320987654321,
						"acc_stderr,none": 0.027306625297327677,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.026244920349843017,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3050847457627119,
						"acc_stderr,none": 0.011759939618085455,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.45588235294117646,
						"acc_stderr,none": 0.030254372573976694,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.018926082916083393,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661895,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.39591836734693875,
						"acc_stderr,none": 0.03130802899065686,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4007149821254469,
						"acc_stderr,none": 0.059349912838627235,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.43781094527363185,
						"acc_stderr,none": 0.0350808011219984,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.31398667935299723,
						"acc_stderr,none": 0.06318036084319219,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.40350877192982454,
						"acc_stderr,none": 0.03762738699917056,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.5878757004584819,
						"acc_stderr,none": 0.004968597754980377,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5926566314076485,
						"acc_stderr,none": 0.004955449461645356,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7598039215686274,
						"acc_stderr,none": 0.021175638634786546,
						"alias": " - mrpc",
						"f1,none": 0.8444444444444444,
						"f1_stderr,none": 0.015535843567066374
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.05789473684210526,
						"exact_match_stderr,remove_whitespace": 0.00388754846309952
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.418,
						"acc_norm_stderr,none": 0.022080014812228137,
						"acc_stderr,none": 0.020514426225628043,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.011110230358066702,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.010742308811391424,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4425,
						"acc_stderr,none": 0.011108941411747605,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.011137752231145224,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.523,
						"acc_stderr,none": 0.011171297997523606,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5075,
						"acc_stderr,none": 0.011181877847486001,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4813571428571429,
						"acc_stderr,none": 0.0534526449360939,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7714907508161044,
						"acc_norm,none": 0.7840043525571273,
						"acc_norm_stderr,none": 0.009601236303553551,
						"acc_stderr,none": 0.00979631351182952,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7308011475391978,
						"acc_norm,none": 0.6189468351657585,
						"acc_norm_stderr,none": 0.010003645220049909,
						"acc_stderr,none": 0.15142216889151972,
						"alias": "pythia",
						"bits_per_byte,none": 0.6350900240825521,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530346562463961,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3929482862948435,
						"perplexity_stderr,none": 0.06685882993754429,
						"word_perplexity,none": 10.527657507813059,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49441698700347797,
						"acc_stderr,none": 0.006764988782474202,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6795943606232995,
						"acc_stderr,none": 0.002320750534880591,
						"alias": " - qqp",
						"f1,none": 0.6886057692307692,
						"f1_stderr,none": 0.002597567940763257
					},
					"record": {
						"alias": "record",
						"em,none": 0.2635,
						"em_stderr,none": 0.004405532416359068,
						"f1,none": 0.27329857167601584,
						"f1_stderr,none": 0.004417896871620672
					},
					"rte": {
						"acc,none": 0.6064981949458483,
						"acc_stderr,none": 0.029405839314203198,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.953,
						"acc_norm,none": 0.923,
						"acc_norm_stderr,none": 0.008434580140240672,
						"acc_stderr,none": 0.00669595667816304,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.856651376146789,
						"acc_stderr,none": 0.011873800423675024,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.32656045167970366,
						"acc_stderr,none": 0.0015130713379683325,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -7.433818428450569,
						"bleu_diff_stderr,none": 0.863288167943516,
						"bleu_max,none": 26.69350922131407,
						"bleu_max_stderr,none": 0.797949354565667,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.685337443876643,
						"rouge1_diff_stderr,none": 0.9190145342369646,
						"rouge1_max,none": 51.96058575416866,
						"rouge1_max_stderr,none": 0.8575985110026056,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237014,
						"rouge2_diff,none": -11.613716960826352,
						"rouge2_diff_stderr,none": 1.1242858242883977,
						"rouge2_max,none": 35.95215198255373,
						"rouge2_max_stderr,none": 1.0106022291829948,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589667,
						"rougeL_diff,none": -9.854204504743402,
						"rougeL_diff_stderr,none": 0.9358254993395869,
						"rougeL_max,none": 49.307668496885384,
						"rougeL_max_stderr,none": 0.8739261865666953
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -7.433818428450569,
						"bleu_diff_stderr,none": 0.863288167943516,
						"bleu_max,none": 26.69350922131407,
						"bleu_max_stderr,none": 0.797949354565667,
						"rouge1_acc,none": 0.29008567931456547,
						"rouge1_acc_stderr,none": 0.01588623687420952,
						"rouge1_diff,none": -9.685337443876643,
						"rouge1_diff_stderr,none": 0.9190145342369646,
						"rouge1_max,none": 51.96058575416866,
						"rouge1_max_stderr,none": 0.8575985110026056,
						"rouge2_acc,none": 0.2631578947368421,
						"rouge2_acc_stderr,none": 0.015415241740237014,
						"rouge2_diff,none": -11.613716960826352,
						"rouge2_diff_stderr,none": 1.1242858242883977,
						"rouge2_max,none": 35.95215198255373,
						"rouge2_max_stderr,none": 1.0106022291829948,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589667,
						"rougeL_diff,none": -9.854204504743402,
						"rougeL_diff_stderr,none": 0.9358254993395869,
						"rougeL_max,none": 49.307668496885384,
						"rougeL_max_stderr,none": 0.8739261865666953
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156494,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3985309400791136,
						"acc_stderr,none": 0.014226081390087606,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6350900240825521,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530346562463961,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.527657507813059,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6787687450670876,
						"acc_stderr,none": 0.01312359932455832,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6214545454545455,
						"acc_stderr,none": 0.07027373008728728,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177514,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.021487751089720532,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290785,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.020776701920308997,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43684069611780457,
						"acc_stderr,none": 0.05199253253663402,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463623,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4755020080321285,
						"acc_stderr,none": 0.010010036112667863,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4955823293172691,
						"acc_stderr,none": 0.010021681681769352,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3859437751004016,
						"acc_stderr,none": 0.009757838842063332,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.52570281124498,
						"acc_stderr,none": 0.010008822253312075,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5012048192771085,
						"acc_stderr,none": 0.010022043771315572,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5040160642570282,
						"acc_stderr,none": 0.010021749574555901,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42048192771084336,
						"acc_stderr,none": 0.009894519551105777,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438302,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.009807915070677294,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41807228915662653,
						"acc_stderr,none": 0.009886618180256037,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44738955823293175,
						"acc_stderr,none": 0.00996643909140793,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.4004016064257028,
						"acc_stderr,none": 0.00982122560976308,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.00987150215909937,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3429718875502008,
						"acc_stderr,none": 0.009514999934033461,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.630226821490885,
						"acc_stderr,none": 0.0634364403908616,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907707,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.786234281932495,
						"acc_stderr,none": 0.01055009920692158,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7174056915949703,
						"acc_stderr,none": 0.011587123627044841,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5638649900727994,
						"acc_stderr,none": 0.012761730431435775,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252869,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6697551290536069,
						"acc_stderr,none": 0.012102848336416566,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189279,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6836532097948379,
						"acc_stderr,none": 0.011967713146973763,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293375,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5804103242885507,
						"acc_stderr,none": 0.012699642268200757,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399377,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8152393796358732,
						"acc_stderr,none": 0.036391676441638335,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8731182795698925,
						"acc_stderr,none": 0.006904273834512096,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7549530761209593,
						"acc_stderr,none": 0.01389638547259635,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7718631178707225,
						"acc_stderr,none": 0.025924909559244272,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187162,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/R4-with-shuffle-rwkv-53_pth"
	},
	"./rwkv-x-dev/RWKV-32K-5B-RW_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6271138669673055,
						"acc_norm,none": 0.6220405862457723,
						"acc_norm_stderr,none": 0.09220958615138242,
						"acc_stderr,none": 0.10493137727598363,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4440625,
						"acc_stderr,none": 0.04944233235136687,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0986,
						"acc_stderr,none": 0.047642507238954954,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8192388059701494,
						"acc_stderr,none": 0.16678526060108925,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2934621099554233,
						"acc_norm,none": 0.2934621099554233,
						"acc_norm_stderr,none": 0.12702360353544004,
						"acc_stderr,none": 0.12702360353544004,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.31333103091003284,
						"acc_norm,none": 0.31333103091003284,
						"acc_norm_stderr,none": 0.05752357691944162,
						"acc_stderr,none": 0.05752357691944162,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.587134764460346,
						"likelihood_diff_stderr,none": 0.5489141401379695,
						"pct_stereotype,none": 0.625968992248062,
						"pct_stereotype_stderr,none": 0.06286909045014075
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04084645669291338,
						"exact_match_stderr,none": 0.004392040138400883
					},
					"glue": {
						"acc,none": 0.725598499285374,
						"acc_stderr,none": 0.004272454313272669,
						"alias": "glue",
						"f1,none": 0.7127557064553156,
						"f1_stderr,none": 0.00010946858684539513,
						"mcc,none": 0.202564842873813,
						"mcc_stderr,none": 0.02971584649701416
					},
					"kmmlu": {
						"acc,none": 0.17054576956396186,
						"acc_norm,none": 0.17054576956396186,
						"acc_norm_stderr,none": 0.041322818791641916,
						"acc_stderr,none": 0.041322818791641916,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5428634071475553,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.0004927935871743445,
						"acc_stderr,none": 0.04866440193877248,
						"alias": "kobest",
						"f1,none": 0.47830807821134774,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6993013778381525,
						"acc_stderr,none": 0.027853412339720694,
						"alias": "lambada",
						"perplexity,none": 4.052754931444214,
						"perplexity_stderr,none": 0.4328722276454289
					},
					"lambada_cloze": {
						"acc,none": 0.027653793906462255,
						"acc_stderr,none": 0.008415325074858027,
						"alias": "lambada_cloze",
						"perplexity,none": 917.7022903355223,
						"perplexity_stderr,none": 308.03866610140574
					},
					"lambada_multilingual": {
						"acc,none": 0.5383660003881234,
						"acc_stderr,none": 0.08797230751794759,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.09561131864882,
						"perplexity_stderr,none": 8.32111730106021
					},
					"mmlu": {
						"acc,none": 0.38221051132317335,
						"acc_stderr,none": 0.07911775550418505,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.37364505844845913,
						"acc_stderr,none": 0.08226320218357598,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.43321532024460896,
						"acc_stderr,none": 0.07614327485150894,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.40558986025349364,
						"acc_stderr,none": 0.06769387954879527,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3219156359023153,
						"acc_stderr,none": 0.06137464307640257,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3697657913413769,
						"acc_norm,none": 0.34706416452176814,
						"acc_norm_stderr,none": 9.989510304536923e-05,
						"acc_stderr,none": 0.06725592922652983,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.48442857142857143,
						"acc_stderr,none": 0.058524570106730234,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7382745946004722,
						"acc_norm,none": 0.6260143595341415,
						"acc_norm_stderr,none": 0.010390231880828323,
						"acc_stderr,none": 0.15635091600076567,
						"alias": "pythia",
						"bits_per_byte,none": 0.636652864431621,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5547179366814612,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.204600378904186,
						"perplexity_stderr,none": 0.06189826726959566,
						"word_perplexity,none": 10.588818863224049,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44858156028368795,
						"acc_norm_stderr,none": 0.06143650017963884,
						"acc_stderr,none": 0.046367380633439884,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6524242121726398,
						"acc_stderr,none": 0.07222213929458608,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.33295656338709595,
						"acc_stderr,none": 0.0016609424733593937,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -5.412376582562824,
						"bleu_diff_stderr,none": 0.8863697530823587,
						"bleu_max,none": 28.083021259886035,
						"bleu_max_stderr,none": 0.8038601307574218,
						"rouge1_acc,none": 0.29865361077111385,
						"rouge1_acc_stderr,none": 0.016021570613768542,
						"rouge1_diff,none": -7.1550465399807495,
						"rouge1_diff_stderr,none": 0.9939764518093742,
						"rouge1_max,none": 53.377525795884935,
						"rouge1_max_stderr,none": 0.864181273769323,
						"rouge2_acc,none": 0.27906976744186046,
						"rouge2_acc_stderr,none": 0.0157021070906279,
						"rouge2_diff,none": -8.469230330133747,
						"rouge2_diff_stderr,none": 1.1878707267198763,
						"rouge2_max,none": 38.028606402210094,
						"rouge2_max_stderr,none": 1.0166979101890905,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713623,
						"rougeL_diff,none": -7.235599218628369,
						"rougeL_diff_stderr,none": 1.0097982578621894,
						"rougeL_max,none": 50.73055876421228,
						"rougeL_max_stderr,none": 0.8819793717714633
					},
					"xcopa": {
						"acc,none": 0.6261818181818183,
						"acc_stderr,none": 0.06980456365734454,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.436091030789826,
						"acc_stderr,none": 0.05246585678355937,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6293243487154804,
						"acc_stderr,none": 0.06499818020097718,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.038517432971331256,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6271138669673055,
						"acc_norm,none": 0.6220405862457723,
						"acc_norm_stderr,none": 0.09220958615138242,
						"acc_stderr,none": 0.10493137727598363,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4440625,
						"acc_stderr,none": 0.04944233235136687,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.015757928553979166,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.419,
						"acc_stderr,none": 0.01561033896757779,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.38166666666666665,
						"acc_stderr,none": 0.014029553645718743,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4052901023890785,
						"acc_norm,none": 0.4274744027303754,
						"acc_norm_stderr,none": 0.014456862944650659,
						"acc_stderr,none": 0.014346869060229337,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7365319865319865,
						"acc_norm,none": 0.718013468013468,
						"acc_norm_stderr,none": 0.009233124071053651,
						"acc_stderr,none": 0.009039157374497713,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0986,
						"acc_stderr,none": 0.047642507238954954,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.026,
						"acc_stderr,none": 0.0035592603398856937,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.1995,
						"acc_stderr,none": 0.008938110533549891,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.086,
						"acc_stderr,none": 0.0062706985918967026,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.078,
						"acc_stderr,none": 0.005997998665721491,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.09,
						"acc_stderr,none": 0.006400819107162966,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1395,
						"acc_stderr,none": 0.007749187050909052,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0395,
						"acc_stderr,none": 0.004356531267228613,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.219,
						"acc_stderr,none": 0.009249988169752811,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.034,
						"acc_stderr,none": 0.004053420174069583,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.086,
						"acc_stderr,none": 0.0062706985918967026,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0017353579175704988,
						"acc_stderr,none": 0.0008671138796248255,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8192388059701494,
						"acc_stderr,none": 0.16678526060108925,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400236,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165561,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315143,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099173,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487912,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621236,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448817,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792945,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318211,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679197,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406094,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319415,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295432,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024391,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950427,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.014566646394664382,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426123,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.278,
						"acc_stderr,none": 0.01417451646148525,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074798,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.01199449323097343,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031314,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644598,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.00961683333969579,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504417,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852627,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140924,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779855,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015097,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.0154458594637713,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.01581217964181488,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345686,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.01339490288966001,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323487,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584937,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.01258369378796813,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.669,
						"acc_stderr,none": 0.014888272588203936,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249935,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469295,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.015672320237336206,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452373,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890145,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151112,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.368,
						"acc_stderr,none": 0.015258073561521798,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369676,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662765,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.0063463592930338335,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.414,
						"acc_stderr,none": 0.015583544104177522,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797582,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7165137614678899,
						"acc_stderr,none": 0.007882623766693438,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8214285714285714,
						"acc_stderr,none": 0.05164277182008721,
						"alias": "cb",
						"f1,none": 0.5722610722610723,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2934621099554233,
						"acc_norm,none": 0.2934621099554233,
						"acc_norm_stderr,none": 0.12702360353544004,
						"acc_stderr,none": 0.12702360353544004,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915606,
						"acc_stderr,none": 0.06742861107915606,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629555,
						"acc_stderr,none": 0.10497277621629555,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.47619047619047616,
						"acc_norm,none": 0.47619047619047616,
						"acc_norm_stderr,none": 0.11167656571008164,
						"acc_stderr,none": 0.11167656571008164,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5227272727272727,
						"acc_norm,none": 0.5227272727272727,
						"acc_norm_stderr,none": 0.07617047451458002,
						"acc_stderr,none": 0.07617047451458002,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.3695652173913043,
						"acc_norm,none": 0.3695652173913043,
						"acc_norm_stderr,none": 0.07195473383945741,
						"acc_stderr,none": 0.07195473383945741,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.31333103091003284,
						"acc_norm,none": 0.31333103091003284,
						"acc_norm_stderr,none": 0.05752357691944162,
						"acc_stderr,none": 0.05752357691944162,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456225,
						"acc_stderr,none": 0.03579526516456225,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3310810810810811,
						"acc_norm,none": 0.3310810810810811,
						"acc_norm_stderr,none": 0.03881461247660828,
						"acc_stderr,none": 0.03881461247660828,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.0393415738622931,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3498452012383901,
						"acc_norm,none": 0.3498452012383901,
						"acc_norm_stderr,none": 0.02657776217561485,
						"acc_stderr,none": 0.02657776217561485,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.37988826815642457,
						"acc_norm,none": 0.37988826815642457,
						"acc_norm_stderr,none": 0.0363791806643084,
						"acc_stderr,none": 0.0363791806643084,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.26582278481012656,
						"acc_norm,none": 0.26582278481012656,
						"acc_norm_stderr,none": 0.02875679962965833,
						"acc_stderr,none": 0.02875679962965833,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999062,
						"acc_stderr,none": 0.04350546818999062,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4056603773584906,
						"acc_norm,none": 0.4056603773584906,
						"acc_norm_stderr,none": 0.04791858528000114,
						"acc_stderr,none": 0.04791858528000114,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.0423247353205504,
						"acc_stderr,none": 0.0423247353205504,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.29304029304029305,
						"acc_norm,none": 0.29304029304029305,
						"acc_norm_stderr,none": 0.02759793255358406,
						"acc_stderr,none": 0.02759793255358406,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.0332057461294543,
						"acc_stderr,none": 0.0332057461294543,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.0330140594698725,
						"acc_stderr,none": 0.0330140594698725,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3129251700680272,
						"acc_norm,none": 0.3129251700680272,
						"acc_norm_stderr,none": 0.03837477482026868,
						"acc_stderr,none": 0.03837477482026868,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.03934735112547112,
						"acc_stderr,none": 0.03934735112547112,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3270440251572327,
						"acc_norm,none": 0.3270440251572327,
						"acc_norm_stderr,none": 0.03732225646493121,
						"acc_stderr,none": 0.03732225646493121,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.036429145782924055,
						"acc_stderr,none": 0.036429145782924055,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255952,
						"acc_stderr,none": 0.03492619473255952,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.031544498882702866,
						"acc_stderr,none": 0.031544498882702866,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4411764705882353,
						"acc_norm,none": 0.4411764705882353,
						"acc_norm_stderr,none": 0.0322529423239964,
						"acc_stderr,none": 0.0322529423239964,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.04024778401977111,
						"acc_stderr,none": 0.04024778401977111,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.0394188850126319,
						"acc_stderr,none": 0.0394188850126319,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3352272727272727,
						"acc_norm,none": 0.3352272727272727,
						"acc_norm_stderr,none": 0.03568512682153708,
						"acc_stderr,none": 0.03568512682153708,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.33557046979865773,
						"acc_norm,none": 0.33557046979865773,
						"acc_norm_stderr,none": 0.03881373830315734,
						"acc_stderr,none": 0.03881373830315734,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3305084745762712,
						"acc_norm,none": 0.3305084745762712,
						"acc_norm_stderr,none": 0.043488147791922734,
						"acc_stderr,none": 0.043488147791922734,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2804878048780488,
						"acc_norm,none": 0.2804878048780488,
						"acc_norm_stderr,none": 0.03518700228801578,
						"acc_stderr,none": 0.03518700228801578,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940589,
						"acc_stderr,none": 0.04265792110940589,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.04134913018303316,
						"acc_stderr,none": 0.04134913018303316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3546511627906977,
						"acc_norm,none": 0.3546511627906977,
						"acc_norm_stderr,none": 0.03658473425938543,
						"acc_stderr,none": 0.03658473425938543,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.291970802919708,
						"acc_norm,none": 0.291970802919708,
						"acc_norm_stderr,none": 0.022454498879013785,
						"acc_stderr,none": 0.022454498879013785,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.43457943925233644,
						"acc_norm,none": 0.43457943925233644,
						"acc_norm_stderr,none": 0.03396491908994051,
						"acc_stderr,none": 0.03396491908994051,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962466,
						"acc_stderr,none": 0.03224133248962466,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.035084853738606925,
						"acc_stderr,none": 0.035084853738606925,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.038783523721386215,
						"acc_stderr,none": 0.038783523721386215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977446,
						"acc_stderr,none": 0.04654465622977446,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.3028571428571429,
						"acc_norm,none": 0.3028571428571429,
						"acc_norm_stderr,none": 0.03483414676585985,
						"acc_stderr,none": 0.03483414676585985,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.0309710334408709,
						"acc_stderr,none": 0.0309710334408709,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2712765957446808,
						"acc_norm,none": 0.2712765957446808,
						"acc_norm_stderr,none": 0.022960000252372686,
						"acc_stderr,none": 0.022960000252372686,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.031924831026639656,
						"acc_stderr,none": 0.031924831026639656,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3563218390804598,
						"acc_norm,none": 0.3563218390804598,
						"acc_norm_stderr,none": 0.036410995772554904,
						"acc_stderr,none": 0.036410995772554904,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.336283185840708,
						"acc_norm,none": 0.336283185840708,
						"acc_norm_stderr,none": 0.031495806053189676,
						"acc_stderr,none": 0.031495806053189676,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.03663974994391242,
						"acc_stderr,none": 0.03663974994391242,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.35502958579881655,
						"acc_norm,none": 0.35502958579881655,
						"acc_norm_stderr,none": 0.03691879594576913,
						"acc_stderr,none": 0.03691879594576913,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.037653278425410414,
						"acc_stderr,none": 0.037653278425410414,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.036955560385363254,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.202564842873813,
						"mcc_stderr,none": 0.02971584649701416
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.587134764460346,
						"likelihood_diff_stderr,none": 0.5489141401379695,
						"pct_stereotype,none": 0.625968992248062,
						"pct_stereotype_stderr,none": 0.06286909045014075
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.894081693500298,
						"likelihood_diff_stderr,none": 0.09171713242917301,
						"pct_stereotype,none": 0.6493738819320215,
						"pct_stereotype_stderr,none": 0.011655543596818141
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.9574175824175826,
						"likelihood_diff_stderr,none": 0.4055533487685927,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.3977272727272725,
						"likelihood_diff_stderr,none": 1.6361347693485013,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.076923076923077,
						"likelihood_diff_stderr,none": 0.6124196149400455,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.05266563052934292
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.78046875,
						"likelihood_diff_stderr,none": 0.16474784896653938,
						"pct_stereotype,none": 0.60625,
						"pct_stereotype_stderr,none": 0.027355258158219247
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.6244212962962963,
						"likelihood_diff_stderr,none": 0.2517548467391104,
						"pct_stereotype,none": 0.5740740740740741,
						"pct_stereotype_stderr,none": 0.03372343271653063
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.387152777777778,
						"likelihood_diff_stderr,none": 0.35726995600402095,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.874507874015748,
						"likelihood_diff_stderr,none": 0.16803778899836555,
						"pct_stereotype,none": 0.5846456692913385,
						"pct_stereotype_stderr,none": 0.021885262514438345
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7736486486486487,
						"likelihood_diff_stderr,none": 0.36428351781425694,
						"pct_stereotype,none": 0.7027027027027027,
						"pct_stereotype_stderr,none": 0.04357977161242459
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.241935483870968,
						"likelihood_diff_stderr,none": 0.4600334232848114,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.032297000033640014
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.343421052631579,
						"likelihood_diff_stderr,none": 0.2539337148940524,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333337
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.280784138342278,
						"likelihood_diff_stderr,none": 0.07659000065624492,
						"pct_stereotype,none": 0.6028622540250447,
						"pct_stereotype_stderr,none": 0.011952056091200874
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.5180555555555557,
						"likelihood_diff_stderr,none": 0.3061324496755405,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.051929078688949845
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.605769230769231,
						"likelihood_diff_stderr,none": 0.5557548130505006,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.912878787878788,
						"likelihood_diff_stderr,none": 0.4628850826867974,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.05615974350262316
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.9049844236760123,
						"likelihood_diff_stderr,none": 0.143028275071876,
						"pct_stereotype,none": 0.5919003115264797,
						"pct_stereotype_stderr,none": 0.02747466632766759
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.3186758893280635,
						"likelihood_diff_stderr,none": 0.19781255149969504,
						"pct_stereotype,none": 0.4624505928853755,
						"pct_stereotype_stderr,none": 0.031408094828172445
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.3368055555555554,
						"likelihood_diff_stderr,none": 0.4274259252916918,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.055335047518872166
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0554347826086956,
						"likelihood_diff_stderr,none": 0.15228651668166915,
						"pct_stereotype,none": 0.5347826086956522,
						"pct_stereotype_stderr,none": 0.023281462893244318
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.240217391304348,
						"likelihood_diff_stderr,none": 0.2655407489713063,
						"pct_stereotype,none": 0.7652173913043478,
						"pct_stereotype_stderr,none": 0.039698395317531235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3722527472527473,
						"likelihood_diff_stderr,none": 0.3122998627862558,
						"pct_stereotype,none": 0.8241758241758241,
						"pct_stereotype_stderr,none": 0.040126194689023176
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7410714285714284,
						"likelihood_diff_stderr,none": 0.24747795756783392,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.032350772404131325
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04084645669291338,
						"exact_match_stderr,none": 0.004392040138400883
					},
					"glue": {
						"acc,none": 0.725598499285374,
						"acc_stderr,none": 0.004272454313272669,
						"alias": "glue",
						"f1,none": 0.7127557064553156,
						"f1_stderr,none": 0.00010946858684539513,
						"mcc,none": 0.202564842873813,
						"mcc_stderr,none": 0.02971584649701416
					},
					"hellaswag": {
						"acc,none": 0.5425214100776737,
						"acc_norm,none": 0.7252539334793866,
						"acc_norm_stderr,none": 0.004454739415705047,
						"acc_stderr,none": 0.004971704917267752,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.17054576956396186,
						"acc_norm,none": 0.17054576956396186,
						"acc_norm_stderr,none": 0.041322818791641916,
						"acc_stderr,none": 0.041322818791641916,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.04560480215720683,
						"acc_stderr,none": 0.04560480215720683,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.145,
						"acc_norm,none": 0.145,
						"acc_norm_stderr,none": 0.011139977517890124,
						"acc_stderr,none": 0.011139977517890124,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.011297239823409303,
						"acc_stderr,none": 0.011297239823409303,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.213,
						"acc_norm,none": 0.213,
						"acc_norm_stderr,none": 0.012953717566737228,
						"acc_stderr,none": 0.012953717566737228,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877366,
						"acc_stderr,none": 0.013148721948877366,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.21166666666666667,
						"acc_norm,none": 0.21166666666666667,
						"acc_norm_stderr,none": 0.016690439361851196,
						"acc_stderr,none": 0.016690439361851196,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696844,
						"acc_stderr,none": 0.010016552866696844,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074112,
						"acc_stderr,none": 0.012336254828074112,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.106,
						"acc_norm,none": 0.106,
						"acc_norm_stderr,none": 0.00973955126578514,
						"acc_stderr,none": 0.00973955126578514,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.03089738243291861,
						"acc_stderr,none": 0.03089738243291861,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.155,
						"acc_norm,none": 0.155,
						"acc_norm_stderr,none": 0.01145015747079945,
						"acc_stderr,none": 0.01145015747079945,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03709560170541631,
						"acc_stderr,none": 0.03709560170541631,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.095,
						"acc_norm,none": 0.095,
						"acc_norm_stderr,none": 0.0092769101031033,
						"acc_stderr,none": 0.0092769101031033,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.01110798754893915,
						"acc_stderr,none": 0.01110798754893915,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.012411851354816341,
						"acc_stderr,none": 0.012411851354816341,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.101,
						"acc_norm,none": 0.101,
						"acc_norm_stderr,none": 0.009533618929341015,
						"acc_stderr,none": 0.009533618929341015,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319319,
						"acc_stderr,none": 0.012535235623319319,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.176,
						"acc_norm,none": 0.176,
						"acc_norm_stderr,none": 0.012048616898597509,
						"acc_stderr,none": 0.012048616898597509,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.011715000693181325,
						"acc_stderr,none": 0.011715000693181325,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.12,
						"acc_norm,none": 0.12,
						"acc_norm_stderr,none": 0.010281328012747393,
						"acc_stderr,none": 0.010281328012747393,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.163,
						"acc_norm,none": 0.163,
						"acc_norm_stderr,none": 0.011686212712746833,
						"acc_stderr,none": 0.011686212712746833,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.145,
						"acc_norm,none": 0.145,
						"acc_norm_stderr,none": 0.011139977517890146,
						"acc_stderr,none": 0.011139977517890146,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595296,
						"acc_stderr,none": 0.013063179040595296,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416053,
						"acc_stderr,none": 0.010811655372416053,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.18333333333333332,
						"acc_norm,none": 0.18333333333333332,
						"acc_norm_stderr,none": 0.015809921771292746,
						"acc_stderr,none": 0.015809921771292746,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.01321172015861475,
						"acc_stderr,none": 0.01321172015861475,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.172,
						"acc_norm,none": 0.172,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.011939788882495321,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.139,
						"acc_norm,none": 0.139,
						"acc_norm_stderr,none": 0.010945263761042975,
						"acc_stderr,none": 0.010945263761042975,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.165,
						"acc_norm,none": 0.165,
						"acc_norm_stderr,none": 0.011743632866916147,
						"acc_stderr,none": 0.011743632866916147,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.024337372337779075,
						"acc_stderr,none": 0.024337372337779075,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440462,
						"acc_stderr,none": 0.013946271849440462,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.116,
						"acc_norm,none": 0.116,
						"acc_norm_stderr,none": 0.010131468138757009,
						"acc_stderr,none": 0.010131468138757009,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.188,
						"acc_norm,none": 0.188,
						"acc_norm_stderr,none": 0.012361586015103758,
						"acc_stderr,none": 0.012361586015103758,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.02861764926136017,
						"acc_stderr,none": 0.02861764926136017,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.161,
						"acc_norm,none": 0.161,
						"acc_norm_stderr,none": 0.011628164696727184,
						"acc_stderr,none": 0.011628164696727184,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696228,
						"acc_stderr,none": 0.013127502859696228,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02936514188266332,
						"acc_stderr,none": 0.02936514188266332,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.173,
						"acc_norm,none": 0.173,
						"acc_norm_stderr,none": 0.011967214137559933,
						"acc_stderr,none": 0.011967214137559933,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5428634071475553,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.0004927935871743445,
						"acc_stderr,none": 0.04866440193877248,
						"alias": "kobest",
						"f1,none": 0.47830807821134774,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5947293447293447,
						"acc_stderr,none": 0.013107003869138892,
						"alias": " - kobest_boolq",
						"f1,none": 0.5653536932444174,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771305,
						"alias": " - kobest_copa",
						"f1,none": 0.6070947462954648,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.42,
						"acc_norm,none": 0.564,
						"acc_norm_stderr,none": 0.022198954641476802,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.41618890915309636,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5289672544080605,
						"acc_stderr,none": 0.02508374348663252,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4036309595533598,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4865079365079365,
						"acc_stderr,none": 0.014086365971849188,
						"alias": " - kobest_wic",
						"f1,none": 0.32728243459690337,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6993013778381525,
						"acc_stderr,none": 0.027853412339720694,
						"alias": "lambada",
						"perplexity,none": 4.052754931444214,
						"perplexity_stderr,none": 0.4328722276454289
					},
					"lambada_cloze": {
						"acc,none": 0.027653793906462255,
						"acc_stderr,none": 0.008415325074858027,
						"alias": "lambada_cloze",
						"perplexity,none": 917.7022903355223,
						"perplexity_stderr,none": 308.03866610140574
					},
					"lambada_multilingual": {
						"acc,none": 0.5383660003881234,
						"acc_stderr,none": 0.08797230751794759,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.09561131864882,
						"perplexity_stderr,none": 8.32111730106021
					},
					"lambada_openai": {
						"acc,none": 0.7547059965068892,
						"acc_stderr,none": 0.005994382323290705,
						"alias": " - lambada_openai",
						"perplexity,none": 3.204600378904186,
						"perplexity_stderr,none": 0.06189826726959566
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.043857946827091016,
						"acc_stderr,none": 0.002852971485658648,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 305.1671511538923,
						"perplexity_stderr,none": 9.716107300433833
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4261595187269552,
						"acc_stderr,none": 0.006889596071653631,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.80111188051332,
						"perplexity_stderr,none": 1.9361638997666382
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7543178730836406,
						"acc_stderr,none": 0.005997580054014221,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2042473919426917,
						"perplexity_stderr,none": 0.06190568723253129
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4523578497962352,
						"acc_stderr,none": 0.006934283157219038,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.93927392367145,
						"perplexity_stderr,none": 1.4336845397662734
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5468659033572676,
						"acc_stderr,none": 0.006935309823023543,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.640260043187062,
						"perplexity_stderr,none": 0.8118287232698698
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5121288569765186,
						"acc_stderr,none": 0.006963927837195679,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.89316335392959,
						"perplexity_stderr,none": 1.1646009579773788
					},
					"lambada_standard": {
						"acc,none": 0.6450611294391616,
						"acc_stderr,none": 0.006666368100412562,
						"alias": " - lambada_standard",
						"perplexity,none": 4.900752655364134,
						"perplexity_stderr,none": 0.1065012648640832
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.011449640985833495,
						"acc_stderr,none": 0.0014822020394334552,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1530.2374295171523,
						"perplexity_stderr,none": 45.426789579790956
					},
					"logiqa": {
						"acc,none": 0.23655913978494625,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.016668667667174192,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24491094147582698,
						"acc_norm,none": 0.28498727735368956,
						"acc_norm_stderr,none": 0.011388893410930608,
						"acc_stderr,none": 0.01084963405007422,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2552763819095477,
						"acc_norm,none": 0.26164154103852594,
						"acc_norm_stderr,none": 0.008046139671905348,
						"acc_stderr,none": 0.007981848348968283,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4484219445032832,
						"acc_stderr,none": 0.005118443571016767,
						"alias": "mc_taco",
						"f1,none": 0.5391150442477876,
						"f1_stderr,none": 0.005680326265169697
					},
					"medmcqa": {
						"acc,none": 0.3480755438680373,
						"acc_norm,none": 0.3480755438680373,
						"acc_norm_stderr,none": 0.0073661978189717885,
						"acc_stderr,none": 0.0073661978189717885,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.35035349567949725,
						"acc_norm,none": 0.35035349567949725,
						"acc_norm_stderr,none": 0.013376676517058362,
						"acc_stderr,none": 0.013376676517058362,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.38221051132317335,
						"acc_stderr,none": 0.07911775550418505,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.362962962962963,
						"acc_stderr,none": 0.04153948404742398,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3618421052631579,
						"acc_stderr,none": 0.03910525752849724,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.029890609686286634,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.04076663253918567,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958548,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.0433643270799318,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.35319148936170214,
						"acc_stderr,none": 0.031245325202761926,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3724137931034483,
						"acc_stderr,none": 0.0402873153294756,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30687830687830686,
						"acc_stderr,none": 0.023752928712112126,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848876,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4161290322580645,
						"acc_stderr,none": 0.028040981380761547,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.0319474007226554,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.03895658065271846,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4393939393939394,
						"acc_stderr,none": 0.035360859475294805,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5233160621761658,
						"acc_stderr,none": 0.036045136724422014,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3435897435897436,
						"acc_stderr,none": 0.024078696580635477,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.027420019350945277,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.030283995525884396,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.47339449541284406,
						"acc_stderr,none": 0.02140695268815158,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791016,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4852941176470588,
						"acc_stderr,none": 0.035077938347913236,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5527426160337553,
						"acc_stderr,none": 0.03236564251614193,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4260089686098655,
						"acc_stderr,none": 0.03318833286217281,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.45038167938931295,
						"acc_stderr,none": 0.04363643698524779,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.37364505844845913,
						"acc_stderr,none": 0.08226320218357598,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.371900826446281,
						"acc_stderr,none": 0.04412015806624505,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.37423312883435583,
						"acc_stderr,none": 0.03802068102899615,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.04948637324026637,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5683760683760684,
						"acc_stderr,none": 0.0324483553531149,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5517241379310345,
						"acc_stderr,none": 0.01778403453499244,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.36416184971098264,
						"acc_stderr,none": 0.025906632631016117,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24692737430167597,
						"acc_stderr,none": 0.014422292204808855,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.43790849673202614,
						"acc_stderr,none": 0.02840830202033269,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.43321532024460896,
						"acc_stderr,none": 0.07614327485150894,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5080385852090032,
						"acc_stderr,none": 0.02839442137098453,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4506172839506173,
						"acc_stderr,none": 0.027684721415656196,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343954,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.32659713168187743,
						"acc_stderr,none": 0.011977676704715993,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.029097209568411945,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3741830065359477,
						"acc_stderr,none": 0.019576953122088837,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.42727272727272725,
						"acc_stderr,none": 0.04738198703545483,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3224489795918367,
						"acc_stderr,none": 0.029923100563683913,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.40558986025349364,
						"acc_stderr,none": 0.06769387954879527,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5223880597014925,
						"acc_stderr,none": 0.035319879302087305,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3219156359023153,
						"acc_stderr,none": 0.06137464307640257,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.037400593820293204,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5906432748538012,
						"acc_stderr,none": 0.037712831076265434,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7845134997452878,
						"acc_stderr,none": 0.004150373846203005,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7937347436940602,
						"acc_stderr,none": 0.004080861802769053,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7058823529411765,
						"acc_stderr,none": 0.022585489065607783,
						"alias": "mrpc",
						"f1,none": 0.8214285714285714,
						"f1_stderr,none": 0.016019467012018482
					},
					"multimedqa": {
						"acc,none": 0.3697657913413769,
						"acc_norm,none": 0.34706416452176814,
						"acc_norm_stderr,none": 9.989510304536923e-05,
						"acc_stderr,none": 0.06725592922652983,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.531559405940594,
						"acc_stderr,none": 0.007167482732895988,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7074868337968133,
						"mrr_stderr,none": 0.010338017513834615,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41309255079006774,
						"r@2_stderr,none": 0.016551480902963107
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.659800603683054,
						"mrr_stderr,none": 0.010432496987188691,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.016757741478801033
					},
					"openbookqa": {
						"acc,none": 0.308,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.022109039310618556,
						"acc_stderr,none": 0.0206670329874661,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.426,
						"acc_stderr,none": 0.011059980179945497,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.01085628525162897,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4095,
						"acc_stderr,none": 0.010998425236316448,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.011124809242874423,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.011161621338114474,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48442857142857143,
						"acc_stderr,none": 0.058524570106730234,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7769314472252449,
						"acc_norm_stderr,none": 0.009713057213018522,
						"acc_stderr,none": 0.009812682950815192,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23494876174210078,
						"acc_norm,none": 0.27444491887275835,
						"acc_norm_stderr,none": 0.0032601376890672735,
						"acc_stderr,none": 0.003097454907944102,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.021683827539286132,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7382745946004722,
						"acc_norm,none": 0.6260143595341415,
						"acc_norm_stderr,none": 0.010390231880828323,
						"acc_stderr,none": 0.15635091600076567,
						"alias": "pythia",
						"bits_per_byte,none": 0.636652864431621,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5547179366814612,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.204600378904186,
						"perplexity_stderr,none": 0.06189826726959566,
						"word_perplexity,none": 10.588818863224049,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3953900709219858,
						"acc_norm,none": 0.44858156028368795,
						"acc_norm_stderr,none": 0.06143650017963884,
						"acc_stderr,none": 0.046367380633439884,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04567549854280213,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.44375,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.029138375022747656,
						"acc_stderr,none": 0.029095492917064907,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.723571605243631,
						"acc_stderr,none": 0.0022242601654827734,
						"alias": "qqp",
						"f1,none": 0.711914213538176,
						"f1_stderr,none": 0.0026080646085025858
					},
					"race": {
						"acc,none": 0.3521531100478469,
						"acc_stderr,none": 0.014782629897202259,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.272,
						"em_stderr,none": 0.004450121386888209,
						"f1,none": 0.28156190499961375,
						"f1_stderr,none": 0.004460122755364622
					},
					"rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.933,
						"acc_norm,none": 0.914,
						"acc_norm_stderr,none": 0.008870325962594766,
						"acc_stderr,none": 0.007910345983177549,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8761467889908257,
						"acc_stderr,none": 0.011161768083128796,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5828251524542637,
						"acc_norm,none": 0.770518844346696,
						"acc_norm_stderr,none": 0.0029730065675431998,
						"acc_stderr,none": 0.003486253177229567,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6524242121726398,
						"acc_stderr,none": 0.07222213929458608,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5728165064102564,
						"acc_stderr,none": 0.004950903462610217,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8427080166210601,
						"acc_stderr,none": 0.003665399375091928,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5462745098039216,
						"acc_stderr,none": 0.004929731448702554,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.33295656338709595,
						"acc_stderr,none": 0.0016609424733593937,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -5.412376582562824,
						"bleu_diff_stderr,none": 0.8863697530823587,
						"bleu_max,none": 28.083021259886035,
						"bleu_max_stderr,none": 0.8038601307574218,
						"rouge1_acc,none": 0.29865361077111385,
						"rouge1_acc_stderr,none": 0.016021570613768542,
						"rouge1_diff,none": -7.1550465399807495,
						"rouge1_diff_stderr,none": 0.9939764518093742,
						"rouge1_max,none": 53.377525795884935,
						"rouge1_max_stderr,none": 0.864181273769323,
						"rouge2_acc,none": 0.27906976744186046,
						"rouge2_acc_stderr,none": 0.0157021070906279,
						"rouge2_diff,none": -8.469230330133747,
						"rouge2_diff_stderr,none": 1.1878707267198763,
						"rouge2_max,none": 38.028606402210094,
						"rouge2_max_stderr,none": 1.0166979101890905,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713623,
						"rougeL_diff,none": -7.235599218628369,
						"rougeL_diff_stderr,none": 1.0097982578621894,
						"rougeL_max,none": 50.73055876421228,
						"rougeL_max_stderr,none": 0.8819793717714633
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3353733170134639,
						"bleu_acc_stderr,none": 0.016527534039668987,
						"bleu_diff,none": -5.412376582562824,
						"bleu_diff_stderr,none": 0.8863697530823587,
						"bleu_max,none": 28.083021259886035,
						"bleu_max_stderr,none": 0.8038601307574218,
						"rouge1_acc,none": 0.29865361077111385,
						"rouge1_acc_stderr,none": 0.016021570613768542,
						"rouge1_diff,none": -7.1550465399807495,
						"rouge1_diff_stderr,none": 0.9939764518093742,
						"rouge1_max,none": 53.377525795884935,
						"rouge1_max_stderr,none": 0.864181273769323,
						"rouge2_acc,none": 0.27906976744186046,
						"rouge2_acc_stderr,none": 0.0157021070906279,
						"rouge2_diff,none": -8.469230330133747,
						"rouge2_diff_stderr,none": 1.1878707267198763,
						"rouge2_max,none": 38.028606402210094,
						"rouge2_max_stderr,none": 1.0166979101890905,
						"rougeL_acc,none": 0.2998776009791922,
						"rougeL_acc_stderr,none": 0.016040352966713623,
						"rougeL_diff,none": -7.235599218628369,
						"rougeL_diff_stderr,none": 1.0097982578621894,
						"rougeL_max,none": 50.73055876421228,
						"rougeL_max_stderr,none": 0.8819793717714633
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25703794369645044,
						"acc_stderr,none": 0.015298077509485083,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4088751830777415,
						"acc_stderr,none": 0.014301719892298751,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04084645669291338,
						"exact_match_stderr,none": 0.004392040138400883
					},
					"wic": {
						"acc,none": 0.5329153605015674,
						"acc_stderr,none": 0.019767747983778065,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.636652864431621,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5547179366814612,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.588818863224049,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7292817679558011,
						"acc_stderr,none": 0.012487904760626304,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.048679937479186836,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.020988366070851,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6261818181818183,
						"acc_stderr,none": 0.06980456365734454,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.020011219298073528,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.01953692357474761,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977413,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740862,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.021434712356072652,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.436091030789826,
						"acc_stderr,none": 0.05246585678355937,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757708,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.010015524156629808,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4783132530120482,
						"acc_stderr,none": 0.010012641367065514,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3815261044176707,
						"acc_stderr,none": 0.009736668133098172,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5433734939759036,
						"acc_stderr,none": 0.009984293410840306,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5016064257028112,
						"acc_stderr,none": 0.010022021141102096,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.0100202105584383,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.40763052208835343,
						"acc_stderr,none": 0.009849569202733735,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4963855421686747,
						"acc_stderr,none": 0.010021811000966357,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39036144578313253,
						"acc_stderr,none": 0.009778161879954582,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.009890599137391931,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4389558232931727,
						"acc_stderr,none": 0.009947100105978365,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750346,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164678,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.009548980649153382,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6293243487154804,
						"acc_stderr,none": 0.06499818020097718,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5764394440767704,
						"acc_stderr,none": 0.01271587138288143,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7888815354070152,
						"acc_stderr,none": 0.010502205965083534,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7180675049636003,
						"acc_stderr,none": 0.011578884735064793,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.012755046289912218,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.01260923817555117,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.671078755790867,
						"acc_stderr,none": 0.012090499234239533,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5407015221707479,
						"acc_stderr,none": 0.012824422739625588,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6843150231634679,
						"acc_stderr,none": 0.011960973299680223,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5565850430178689,
						"acc_stderr,none": 0.0127844621366572,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5876902713434812,
						"acc_stderr,none": 0.01266769412239704,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6333553937789543,
						"acc_stderr,none": 0.012401034429990696,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8174870757473589,
						"acc_stderr,none": 0.038517432971331256,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907546,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.051219942106581456,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7476538060479666,
						"acc_stderr,none": 0.01403349677309752,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7870722433460076,
						"acc_stderr,none": 0.025291395445662838,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.026198057744026407,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.018417468024139707,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/RWKV-32K-5B-RW_pth"
	},
	"./rwkv-x-dev/RWKV-5-World-1B5-v2-20231025-ctx4096": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"anli": {
						"acc,none": 0.344375,
						"acc_stderr,none": 0.016214535725893844,
						"alias": "anli"
					}
				},
				"results": {
					"anli": {
						"acc,none": 0.344375,
						"acc_stderr,none": 0.016214535725893844,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407557,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456732,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.013728421539454876,
						"alias": " - anli_r3"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/RWKV-5-World-1B5-v2-20231025-ctx4096"
	},
	"./rwkv-x-dev/RWKV-5-World-3B-v2-20231118-ctx16k": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"anli": {
						"acc,none": 0.3475,
						"acc_stderr,none": 0.014733637524722431,
						"alias": "anli"
					}
				},
				"results": {
					"anli": {
						"acc,none": 0.3475,
						"acc_stderr,none": 0.014733637524722431,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.352,
						"acc_stderr,none": 0.015110404505648666,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.346,
						"acc_stderr,none": 0.015050266127564448,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.013728421539454878,
						"alias": " - anli_r3"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/RWKV-5-World-3B-v2-20231118-ctx16k"
	},
	"./rwkv-x-dev/RWKV-5-World-7B-v2-20240128-ctx4096": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961715,
						"alias": "anli"
					}
				},
				"results": {
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961715,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797658,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055365,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35333333333333333,
						"acc_stderr,none": 0.013804572162314963,
						"alias": " - anli_r3"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/RWKV-5-World-7B-v2-20240128-ctx4096"
	},
	"./rwkv-x-dev/blink4-final_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6307779030439684,
						"acc_norm,none": 0.6296505073280722,
						"acc_norm_stderr,none": 0.09456177238890466,
						"acc_stderr,none": 0.10743947566063969,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.01991587930845683,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.837865671641791,
						"acc_stderr,none": 0.15269610307865172,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3060783975133829,
						"acc_norm,none": 0.3060783975133829,
						"acc_norm_stderr,none": 0.05964874273376981,
						"acc_stderr,none": 0.05964874273376981,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.541567413053835,
						"acc_stderr,none": 0.012686230524656401,
						"alias": "glue",
						"f1,none": 0.6660840132052501,
						"f1_stderr,none": 0.0002490572630158045,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7156995924704056,
						"acc_stderr,none": 0.017918705868328787,
						"alias": "lambada",
						"perplexity,none": 3.7825564005254915,
						"perplexity_stderr,none": 0.24176388512812
					},
					"lambada_multilingual": {
						"acc,none": 0.5417426741703861,
						"acc_stderr,none": 0.08480918553874013,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.984209978943372,
						"perplexity_stderr,none": 8.241857583540895
					},
					"mmlu": {
						"acc,none": 0.323885486397949,
						"acc_stderr,none": 0.06413587939631772,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31583421891604674,
						"acc_stderr,none": 0.053843727243415865,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3714193756034761,
						"acc_stderr,none": 0.05166478932586059,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.34644133896652585,
						"acc_stderr,none": 0.06350781839184878,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2670472565810339,
						"acc_stderr,none": 0.06357982586650601,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.46364285714285713,
						"acc_stderr,none": 0.04624333088715949,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7423445847541348,
						"acc_norm,none": 0.6334058628103801,
						"acc_norm_stderr,none": 0.01088982807449506,
						"acc_stderr,none": 0.14541590059671017,
						"alias": "pythia",
						"bits_per_byte,none": 0.6373169038932425,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5554337024271663,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3273346993629196,
						"perplexity_stderr,none": 0.06548894214557961,
						"word_perplexity,none": 10.61491331814627,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.32668663944045867,
						"acc_stderr,none": 0.0015161922166166773,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -6.52307377852992,
						"bleu_diff_stderr,none": 0.882608139760819,
						"bleu_max,none": 28.190731362000818,
						"bleu_max_stderr,none": 0.8192896472333284,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -8.584442495314844,
						"rouge1_diff_stderr,none": 0.9666373382314315,
						"rouge1_max,none": 53.62870155203736,
						"rouge1_max_stderr,none": 0.8686447297916212,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -10.645773648157844,
						"rouge2_diff_stderr,none": 1.1680622792150868,
						"rouge2_max,none": 37.66322635174381,
						"rouge2_max_stderr,none": 1.0339594500148117,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589674,
						"rougeL_diff,none": -8.947157840870293,
						"rougeL_diff_stderr,none": 0.9835913993380507,
						"rougeL_max,none": 50.7677058099679,
						"rougeL_max_stderr,none": 0.8879196339431965
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07232487995885858,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43493975903614457,
						"acc_stderr,none": 0.04839382752943621,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6323325913001624,
						"acc_stderr,none": 0.05339216973724912,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8118678354686446,
						"acc_stderr,none": 0.036824418228777196,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6307779030439684,
						"acc_norm,none": 0.6296505073280722,
						"acc_norm_stderr,none": 0.09456177238890466,
						"acc_stderr,none": 0.10743947566063969,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.01991587930845683,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.015387682761897068,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.014976758771620342,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295756,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4035836177474403,
						"acc_norm,none": 0.4300341296928328,
						"acc_norm_stderr,none": 0.014467631559137996,
						"acc_stderr,none": 0.014337158914268447,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7428451178451179,
						"acc_norm,none": 0.7281144781144782,
						"acc_norm_stderr,none": 0.009129795867310496,
						"acc_stderr,none": 0.00896839476897199,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.837865671641791,
						"acc_stderr,none": 0.15269610307865172,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942307,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565894,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644603,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724446,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042094,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877366,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504411,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448817,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163043,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.0061998740663370645,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389617,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333336,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177546,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178374,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832047,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.01277255409611312,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555953,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804483,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.0077997330618320235,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423524,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689097,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.409,
						"acc_stderr,none": 0.015555094373257942,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968135,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095527,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746839,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139973,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474921,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996693,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.01541001195549393,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946085,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042766,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.01552498067712258,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543153,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.00891686663074592,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766276,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.01253523562331932,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469428,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244071,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.01263164908309918,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.015788865959539017,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.0053971408290992,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592076,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099844,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.739,
						"acc_stderr,none": 0.013895037677965126,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.477,
						"acc_stderr,none": 0.015802554246726105,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837961,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698459,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405745,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103765,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992434,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406728,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832049,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496114945,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099224,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911076,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055238,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3060783975133829,
						"acc_norm,none": 0.3060783975133829,
						"acc_norm_stderr,none": 0.05964874273376981,
						"acc_stderr,none": 0.05964874273376981,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3609467455621302,
						"acc_norm,none": 0.3609467455621302,
						"acc_norm_stderr,none": 0.037054030083353624,
						"acc_stderr,none": 0.037054030083353624,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.038610038610038595,
						"acc_stderr,none": 0.038610038610038595,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665336,
						"acc_stderr,none": 0.03334645408665336,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.039125538756915115,
						"acc_stderr,none": 0.039125538756915115,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3492822966507177,
						"acc_norm,none": 0.3492822966507177,
						"acc_norm_stderr,none": 0.033056200243000905,
						"acc_stderr,none": 0.033056200243000905,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.33587786259541985,
						"acc_norm,none": 0.33587786259541985,
						"acc_norm_stderr,none": 0.04142313771996665,
						"acc_stderr,none": 0.04142313771996665,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.34365325077399383,
						"acc_norm,none": 0.34365325077399383,
						"acc_norm_stderr,none": 0.026466649235579315,
						"acc_stderr,none": 0.026466649235579315,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.03228210387037894,
						"acc_stderr,none": 0.03228210387037894,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.329608938547486,
						"acc_norm,none": 0.329608938547486,
						"acc_norm_stderr,none": 0.03523332230992217,
						"acc_stderr,none": 0.03523332230992217,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2742616033755274,
						"acc_norm,none": 0.2742616033755274,
						"acc_norm_stderr,none": 0.029041333510598035,
						"acc_stderr,none": 0.029041333510598035,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.03743138631255278,
						"acc_stderr,none": 0.03743138631255278,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.047792516928013694,
						"acc_stderr,none": 0.047792516928013694,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.047732492983673595,
						"acc_stderr,none": 0.047732492983673595,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252628,
						"acc_stderr,none": 0.04077494709252628,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.042857142857142844,
						"acc_stderr,none": 0.042857142857142844,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.20754716981132076,
						"acc_norm,none": 0.20754716981132076,
						"acc_norm_stderr,none": 0.03957769238377933,
						"acc_stderr,none": 0.03957769238377933,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.026598537627601455,
						"acc_stderr,none": 0.026598537627601455,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.036155076303109344,
						"acc_stderr,none": 0.036155076303109344,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675176,
						"acc_stderr,none": 0.03711513959675176,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.039089144792915614,
						"acc_stderr,none": 0.039089144792915614,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.34591194968553457,
						"acc_norm,none": 0.34591194968553457,
						"acc_norm_stderr,none": 0.037841848841408295,
						"acc_stderr,none": 0.037841848841408295,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3312883435582822,
						"acc_norm,none": 0.3312883435582822,
						"acc_norm_stderr,none": 0.03697983910025588,
						"acc_stderr,none": 0.03697983910025588,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3081395348837209,
						"acc_norm,none": 0.3081395348837209,
						"acc_norm_stderr,none": 0.03530895898152281,
						"acc_stderr,none": 0.03530895898152281,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300993,
						"acc_stderr,none": 0.03225883512300993,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.39915966386554624,
						"acc_norm,none": 0.39915966386554624,
						"acc_norm_stderr,none": 0.03181110032413925,
						"acc_stderr,none": 0.03181110032413925,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.02932276422894953,
						"acc_stderr,none": 0.02932276422894953,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.040247784019771124,
						"acc_stderr,none": 0.040247784019771124,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.03941888501263192,
						"acc_stderr,none": 0.03941888501263192,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3087248322147651,
						"acc_norm,none": 0.3087248322147651,
						"acc_norm_stderr,none": 0.037973480272130815,
						"acc_stderr,none": 0.037973480272130815,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268948,
						"acc_stderr,none": 0.03279317792268948,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.043197822302613424,
						"acc_stderr,none": 0.043197822302613424,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.19090909090909092,
						"acc_norm,none": 0.19090909090909092,
						"acc_norm_stderr,none": 0.03764425585984924,
						"acc_stderr,none": 0.03764425585984924,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.04073524322147126,
						"acc_stderr,none": 0.04073524322147126,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3546511627906977,
						"acc_norm,none": 0.3546511627906977,
						"acc_norm_stderr,none": 0.03658473425938542,
						"acc_stderr,none": 0.03658473425938542,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2895377128953771,
						"acc_norm,none": 0.2895377128953771,
						"acc_norm_stderr,none": 0.022399130302514076,
						"acc_stderr,none": 0.022399130302514076,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.43457943925233644,
						"acc_norm,none": 0.43457943925233644,
						"acc_norm_stderr,none": 0.033964919089940517,
						"acc_stderr,none": 0.033964919089940517,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.23577235772357724,
						"acc_norm,none": 0.23577235772357724,
						"acc_norm_stderr,none": 0.038430664952148384,
						"acc_stderr,none": 0.038430664952148384,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.032241332489624665,
						"acc_stderr,none": 0.032241332489624665,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.34444444444444444,
						"acc_norm,none": 0.34444444444444444,
						"acc_norm_stderr,none": 0.035517126967439826,
						"acc_stderr,none": 0.035517126967439826,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.0346439012574329,
						"acc_stderr,none": 0.0346439012574329,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.32413793103448274,
						"acc_norm,none": 0.32413793103448274,
						"acc_norm_stderr,none": 0.03900432069185555,
						"acc_stderr,none": 0.03900432069185555,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.04803844614152614,
						"acc_stderr,none": 0.04803844614152614,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.033822819375172945,
						"acc_stderr,none": 0.033822819375172945,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846655,
						"acc_stderr,none": 0.030469670650846655,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.38362068965517243,
						"acc_norm,none": 0.38362068965517243,
						"acc_norm_stderr,none": 0.03199403733163039,
						"acc_stderr,none": 0.03199403733163039,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066653,
						"acc_stderr,none": 0.03785714465066653,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.33185840707964603,
						"acc_norm,none": 0.33185840707964603,
						"acc_norm_stderr,none": 0.03139203046282125,
						"acc_stderr,none": 0.03139203046282125,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.03646204963253812,
						"acc_stderr,none": 0.03646204963253812,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676526,
						"acc_stderr,none": 0.03597530251676526,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.03658142543288738,
						"acc_stderr,none": 0.03658142543288738,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.034621578458651416,
						"acc_stderr,none": 0.034621578458651416,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.541567413053835,
						"acc_stderr,none": 0.012686230524656401,
						"alias": "glue",
						"f1,none": 0.6660840132052501,
						"f1_stderr,none": 0.0002490572630158045,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5238996215893248,
						"acc_norm,none": 0.7018522206731727,
						"acc_norm_stderr,none": 0.004565098421085229,
						"acc_stderr,none": 0.004984077906216102,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7156995924704056,
						"acc_stderr,none": 0.017918705868328787,
						"alias": "lambada",
						"perplexity,none": 3.7825564005254915,
						"perplexity_stderr,none": 0.24176388512812
					},
					"lambada_multilingual": {
						"acc,none": 0.5417426741703861,
						"acc_stderr,none": 0.08480918553874013,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.984209978943372,
						"perplexity_stderr,none": 8.241857583540895
					},
					"lambada_openai": {
						"acc,none": 0.7473316514651659,
						"acc_stderr,none": 0.006054024606254286,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3273346993629196,
						"perplexity_stderr,none": 0.06548894214557961
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4288763826896953,
						"acc_stderr,none": 0.0068951417412077875,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.20000700399246,
						"perplexity_stderr,none": 1.8957136900662221
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7475257131767902,
						"acc_stderr,none": 0.006052484945314857,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3270060483529855,
						"perplexity_stderr,none": 0.06549650816924392
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4593440714147099,
						"acc_stderr,none": 0.006942911275397646,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.648969171683188,
						"perplexity_stderr,none": 1.467511227293892
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5526877547059965,
						"acc_stderr,none": 0.0069271945939575275,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.29615579933966,
						"perplexity_stderr,none": 0.7906458936470977
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.520279448864739,
						"acc_stderr,none": 0.006960245698059409,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.448911871348567,
						"perplexity_stderr,none": 1.134315447110098
					},
					"lambada_standard": {
						"acc,none": 0.6821269163594023,
						"acc_stderr,none": 0.006487412955192986,
						"alias": " - lambada_standard",
						"perplexity,none": 4.239429282186711,
						"perplexity_stderr,none": 0.09071319955084829
					},
					"logiqa": {
						"acc,none": 0.25499231950844853,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.017512971782225207,
						"acc_stderr,none": 0.017095714105279818,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.323885486397949,
						"acc_stderr,none": 0.06413587939631772,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4188679245283019,
						"acc_stderr,none": 0.03036505082911521,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357335,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932268,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3583815028901734,
						"acc_stderr,none": 0.03656343653353158,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.04023382273617746,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3276595744680851,
						"acc_stderr,none": 0.030683020843231008,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481425,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003337,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.02113285918275443,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.040061680838488774,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38064516129032255,
						"acc_stderr,none": 0.027621717832907032,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.030315099285617743,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3939393939393939,
						"acc_stderr,none": 0.0381549430868893,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.41919191919191917,
						"acc_stderr,none": 0.03515520728670417,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.44559585492227977,
						"acc_stderr,none": 0.03587014986075659,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.0234009289183105,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.02606715922227579,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.028510251512341933,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.03603038545360381,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3669724770642202,
						"acc_stderr,none": 0.02066467565952053,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.14814814814814814,
						"acc_stderr,none": 0.024227629273728363,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.44607843137254904,
						"acc_stderr,none": 0.034888454513049734,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4219409282700422,
						"acc_stderr,none": 0.032148146302403695,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.42748091603053434,
						"acc_stderr,none": 0.04338920305792401,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31583421891604674,
						"acc_stderr,none": 0.053843727243415865,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.043913262867240704,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497752,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3067484662576687,
						"acc_stderr,none": 0.036230899157241474,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291517,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.04865777570410769,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.37606837606837606,
						"acc_stderr,none": 0.03173393632969481,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.42656449553001274,
						"acc_stderr,none": 0.01768606697567564,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.33815028901734107,
						"acc_stderr,none": 0.02546977014940017,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3104575163398693,
						"acc_stderr,none": 0.0264930332251459,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3714193756034761,
						"acc_stderr,none": 0.05166478932586059,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.38263665594855306,
						"acc_stderr,none": 0.027604689028581996,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3549382716049383,
						"acc_stderr,none": 0.02662415247884585,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2835723598435463,
						"acc_stderr,none": 0.011511900775968316,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.02952009569768776,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.31862745098039214,
						"acc_stderr,none": 0.018850084696468705,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.04769300568972742,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24081632653061225,
						"acc_stderr,none": 0.027372942201788156,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.34644133896652585,
						"acc_stderr,none": 0.06350781839184878,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.44776119402985076,
						"acc_stderr,none": 0.03516184772952165,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2670472565810339,
						"acc_stderr,none": 0.06357982586650601,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683227,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.38596491228070173,
						"acc_stderr,none": 0.03733756969066164,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3455934793683138,
						"acc_stderr,none": 0.004800467701748957,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.342860048820179,
						"acc_stderr,none": 0.0047872766872064714,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": " - mrpc",
						"f1,none": 0.8340943683409436,
						"f1_stderr,none": 0.015665614274445003
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.051800554016620495,
						"exact_match_stderr,remove_whitespace": 0.003689127030731772
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.02206494331392887,
						"acc_stderr,none": 0.020553269174209205,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.011042665902539781,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3795,
						"acc_stderr,none": 0.010853514379554391,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3785,
						"acc_stderr,none": 0.010847935926107406,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.01113148485052578,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.011161621338114477,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4815,
						"acc_stderr,none": 0.011175478542788582,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.01118233080628221,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.46364285714285713,
						"acc_stderr,none": 0.04624333088715949,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7687704026115343,
						"acc_norm,none": 0.7742110990206746,
						"acc_norm_stderr,none": 0.009754980670917325,
						"acc_stderr,none": 0.009837063180625326,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7423445847541348,
						"acc_norm,none": 0.6334058628103801,
						"acc_norm_stderr,none": 0.01088982807449506,
						"acc_stderr,none": 0.14541590059671017,
						"alias": "pythia",
						"bits_per_byte,none": 0.6373169038932425,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5554337024271663,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3273346993629196,
						"perplexity_stderr,none": 0.06548894214557961,
						"word_perplexity,none": 10.61491331814627,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49569833424858134,
						"acc_stderr,none": 0.006765160168388145,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6336136532278012,
						"acc_stderr,none": 0.002396268090409224,
						"alias": " - qqp",
						"f1,none": 0.6646290384658924,
						"f1_stderr,none": 0.00259554515528604
					},
					"record": {
						"alias": "record",
						"em,none": 0.2727,
						"em_stderr,none": 0.004453701218700751,
						"f1,none": 0.28268857167363165,
						"f1_stderr,none": 0.00446420750423061
					},
					"rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.029032524428023707,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.951,
						"acc_norm,none": 0.931,
						"acc_norm_stderr,none": 0.008018934050315165,
						"acc_stderr,none": 0.0068297617561409235,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8979357798165137,
						"acc_stderr,none": 0.010257707759295616,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.32668663944045867,
						"acc_stderr,none": 0.0015161922166166773,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -6.52307377852992,
						"bleu_diff_stderr,none": 0.882608139760819,
						"bleu_max,none": 28.190731362000818,
						"bleu_max_stderr,none": 0.8192896472333284,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -8.584442495314844,
						"rouge1_diff_stderr,none": 0.9666373382314315,
						"rouge1_max,none": 53.62870155203736,
						"rouge1_max_stderr,none": 0.8686447297916212,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -10.645773648157844,
						"rouge2_diff_stderr,none": 1.1680622792150868,
						"rouge2_max,none": 37.66322635174381,
						"rouge2_max_stderr,none": 1.0339594500148117,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589674,
						"rougeL_diff,none": -8.947157840870293,
						"rougeL_diff_stderr,none": 0.9835913993380507,
						"rougeL_max,none": 50.7677058099679,
						"rougeL_max_stderr,none": 0.8879196339431965
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3219094247246022,
						"bleu_acc_stderr,none": 0.0163555676119604,
						"bleu_diff,none": -6.52307377852992,
						"bleu_diff_stderr,none": 0.882608139760819,
						"bleu_max,none": 28.190731362000818,
						"bleu_max_stderr,none": 0.8192896472333284,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -8.584442495314844,
						"rouge1_diff_stderr,none": 0.9666373382314315,
						"rouge1_max,none": 53.62870155203736,
						"rouge1_max_stderr,none": 0.8686447297916212,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -10.645773648157844,
						"rouge2_diff_stderr,none": 1.1680622792150868,
						"rouge2_max,none": 37.66322635174381,
						"rouge2_max_stderr,none": 1.0339594500148117,
						"rougeL_acc,none": 0.29498164014687883,
						"rougeL_acc_stderr,none": 0.015964400965589674,
						"rougeL_diff,none": -8.947157840870293,
						"rougeL_diff_stderr,none": 0.9835913993380507,
						"rougeL_max,none": 50.7677058099679,
						"rougeL_max_stderr,none": 0.8879196339431965
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156503,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39878331560062363,
						"acc_stderr,none": 0.014125370004563044,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6373169038932425,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5554337024271663,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.61491331814627,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6787687450670876,
						"acc_stderr,none": 0.013123599324558321,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.6153846153846154,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07232487995885858,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.021983962090086333,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.022371610982580396,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566055,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747612,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.021660710347204487,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177514,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43493975903614457,
						"acc_stderr,none": 0.04839382752943621,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201524,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.45863453815261046,
						"acc_stderr,none": 0.00998771641240657,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4843373493975904,
						"acc_stderr,none": 0.010017154458106754,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3895582329317269,
						"acc_stderr,none": 0.009774529590783674,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5377510040160642,
						"acc_stderr,none": 0.00999346636087279,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975081,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43453815261044176,
						"acc_stderr,none": 0.00993580735485682,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4746987951807229,
						"acc_stderr,none": 0.010009233363499405,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3815261044176707,
						"acc_stderr,none": 0.009736668133098168,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750342,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45140562248995986,
						"acc_stderr,none": 0.00997462804772198,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.00984346200738423,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.0098932194691157,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3562248995983936,
						"acc_stderr,none": 0.009598796305792164,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6323325913001624,
						"acc_stderr,none": 0.05339216973724912,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.012605764077627148,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959677,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.01162785634694062,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.01277447516071633,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6068828590337525,
						"acc_stderr,none": 0.01256970115195732,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6637988087359364,
						"acc_stderr,none": 0.012157083081239747,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5473196558570483,
						"acc_stderr,none": 0.012809372866181957,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.684976836532098,
						"acc_stderr,none": 0.011954205387840951,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716323,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389868,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6465916611515553,
						"acc_stderr,none": 0.012301695486460663,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8118678354686446,
						"acc_stderr,none": 0.036824418228777196,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8701075268817204,
						"acc_stderr,none": 0.006973653965627705,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7590361445783133,
						"acc_stderr,none": 0.047228076059872566,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7497393117831074,
						"acc_stderr,none": 0.013994864706473825,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.025771203207084706,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.026665559335926015,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.0184174680241397,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/bruber_9b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6240135287485907,
						"acc_norm,none": 0.6335963923337091,
						"acc_norm_stderr,none": 0.08883977215704557,
						"acc_stderr,none": 0.10348420212561084,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3546875,
						"acc_stderr,none": 0.01486521313059255,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8366865671641791,
						"acc_stderr,none": 0.14154376565375207,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2996891728544293,
						"acc_norm,none": 0.2996891728544293,
						"acc_norm_stderr,none": 0.055039626921429746,
						"acc_stderr,none": 0.055039626921429746,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5341531681753215,
						"acc_stderr,none": 0.008287928900304097,
						"alias": "glue",
						"f1,none": 0.6490161163942175,
						"f1_stderr,none": 0.00027659914764989804,
						"mcc,none": 0.025208083291660098,
						"mcc_stderr,none": 0.03319251627775123
					},
					"lambada": {
						"acc,none": 0.7145352222006598,
						"acc_stderr,none": 0.021690167525119226,
						"alias": "lambada",
						"perplexity,none": 3.7498847633182866,
						"perplexity_stderr,none": 0.29742689720133125
					},
					"lambada_multilingual": {
						"acc,none": 0.541820298855036,
						"acc_stderr,none": 0.08910457244092326,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.940024678125603,
						"perplexity_stderr,none": 8.851802987642479
					},
					"mmlu": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.05281377760305703,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.30669500531349625,
						"acc_stderr,none": 0.046684012108880915,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.35629224332153203,
						"acc_stderr,none": 0.042403440033874494,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3457913552161196,
						"acc_stderr,none": 0.046105796618286045,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2759276879162703,
						"acc_stderr,none": 0.0558237068265109,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4632857142857143,
						"acc_stderr,none": 0.05254008462814893,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7404627613880729,
						"acc_norm,none": 0.637063884816814,
						"acc_norm_stderr,none": 0.009838673458380291,
						"acc_stderr,none": 0.1375167773496648,
						"alias": "pythia",
						"bits_per_byte,none": 0.6418735113773659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5603541499377176,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.177009274087739,
						"perplexity_stderr,none": 0.06327343575776845,
						"word_perplexity,none": 10.79571520647912,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.31572710811905147,
						"acc_stderr,none": 0.0014275466800908211,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2913096695226438,
						"bleu_acc_stderr,none": 0.01590598704818483,
						"bleu_diff,none": -7.015492879644246,
						"bleu_diff_stderr,none": 0.8202522569573135,
						"bleu_max,none": 24.925938188081272,
						"bleu_max_stderr,none": 0.8124607538157117,
						"rouge1_acc,none": 0.25703794369645044,
						"rouge1_acc_stderr,none": 0.015298077509485088,
						"rouge1_diff,none": -9.107386123211176,
						"rouge1_diff_stderr,none": 0.8795274105011827,
						"rouge1_max,none": 48.00517855309214,
						"rouge1_max_stderr,none": 0.9701878631298597,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080501,
						"rouge2_diff,none": -11.027787199859466,
						"rouge2_diff_stderr,none": 1.053755183481189,
						"rouge2_max,none": 33.18100281561311,
						"rouge2_max_stderr,none": 1.0285638555587886,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.015321821688476178,
						"rougeL_diff,none": -9.392047504042909,
						"rougeL_diff_stderr,none": 0.896324737751785,
						"rougeL_max,none": 45.370497664448095,
						"rougeL_max_stderr,none": 0.9724912658947464
					},
					"xcopa": {
						"acc,none": 0.6154545454545455,
						"acc_stderr,none": 0.07026633916338663,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43167336010709506,
						"acc_stderr,none": 0.055157505641048286,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6172913783767523,
						"acc_stderr,none": 0.06939720515724919,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7999550460777702,
						"acc_stderr,none": 0.03675907681957291,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6240135287485907,
						"acc_norm,none": 0.6335963923337091,
						"acc_norm_stderr,none": 0.08883977215704557,
						"acc_stderr,none": 0.10348420212561084,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3546875,
						"acc_stderr,none": 0.01486521313059255,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.0151772642247986,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.015080663991563097,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35583333333333333,
						"acc_stderr,none": 0.01382651874849331,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4052901023890785,
						"acc_norm,none": 0.4462457337883959,
						"acc_norm_stderr,none": 0.014526705548539982,
						"acc_stderr,none": 0.014346869060229335,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7319023569023569,
						"acc_norm,none": 0.726010101010101,
						"acc_norm_stderr,none": 0.009151805901544028,
						"acc_stderr,none": 0.0090895265782137,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8366865671641791,
						"acc_stderr,none": 0.14154376565375207,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151112,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426635,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578052,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.0123107902084128,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340988,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220055,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301315,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274533,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796577,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448856,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178363,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796396,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474919,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666683,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796558,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.00877616208949111,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306489,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345703,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438688,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220069,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.785,
						"acc_stderr,none": 0.012997843819031825,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796384,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271305,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.303,
						"acc_stderr,none": 0.014539683710535259,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042097,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852618,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746844,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919297,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345684,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280302,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929341008,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.00989939381972444,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.01573351656634783,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220466,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543147,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695449,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855742,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783208,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085341,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.0068954729748978965,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345693,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559959,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.015649789644462217,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734927,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.00923305200078774,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676773,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440464,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.015811198373114885,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139724,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032140007,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348635,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343968,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968121,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706823,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.0078552979386976,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.0044294039801783544,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410042,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779864,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.01502963372440895,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2996891728544293,
						"acc_norm,none": 0.2996891728544293,
						"acc_norm_stderr,none": 0.055039626921429746,
						"acc_stderr,none": 0.055039626921429746,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03662869876642905,
						"acc_stderr,none": 0.03662869876642905,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21341463414634146,
						"acc_norm,none": 0.21341463414634146,
						"acc_norm_stderr,none": 0.03209158941079731,
						"acc_stderr,none": 0.03209158941079731,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.36875,
						"acc_norm,none": 0.36875,
						"acc_norm_stderr,none": 0.03826204233503226,
						"acc_stderr,none": 0.03826204233503226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.03344678470011799,
						"acc_stderr,none": 0.03344678470011799,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3053435114503817,
						"acc_norm,none": 0.3053435114503817,
						"acc_norm_stderr,none": 0.04039314978724562,
						"acc_stderr,none": 0.04039314978724562,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.0402637721078731,
						"acc_stderr,none": 0.0402637721078731,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3188854489164087,
						"acc_norm,none": 0.3188854489164087,
						"acc_norm_stderr,none": 0.025971647189191573,
						"acc_stderr,none": 0.025971647189191573,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832904,
						"acc_stderr,none": 0.031321798030832904,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328526,
						"acc_stderr,none": 0.03383195081328526,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.3037974683544304,
						"acc_norm,none": 0.3037974683544304,
						"acc_norm_stderr,none": 0.029936696387138605,
						"acc_stderr,none": 0.029936696387138605,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.037431386312552786,
						"acc_stderr,none": 0.037431386312552786,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.047304390228528934,
						"acc_stderr,none": 0.047304390228528934,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.041331194402438376,
						"acc_stderr,none": 0.041331194402438376,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439376,
						"acc_stderr,none": 0.04396093377439376,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.304029304029304,
						"acc_norm,none": 0.304029304029304,
						"acc_norm_stderr,none": 0.027891299397152947,
						"acc_stderr,none": 0.027891299397152947,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.0327028718148208,
						"acc_stderr,none": 0.0327028718148208,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.03508771929824565,
						"acc_stderr,none": 0.03508771929824565,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.23129251700680273,
						"acc_norm,none": 0.23129251700680273,
						"acc_norm_stderr,none": 0.034896744812616155,
						"acc_stderr,none": 0.034896744812616155,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.039594402847357935,
						"acc_stderr,none": 0.039594402847357935,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.03652215878407505,
						"acc_stderr,none": 0.03652215878407505,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3374233128834356,
						"acc_norm,none": 0.3374233128834356,
						"acc_norm_stderr,none": 0.03714908409935574,
						"acc_stderr,none": 0.03714908409935574,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.03566545538084812,
						"acc_stderr,none": 0.03566545538084812,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.028746730632681367,
						"acc_stderr,none": 0.028746730632681367,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713547,
						"acc_stderr,none": 0.03191178226713547,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3739495798319328,
						"acc_norm,none": 0.3739495798319328,
						"acc_norm_stderr,none": 0.03142946637883708,
						"acc_stderr,none": 0.03142946637883708,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930115,
						"acc_stderr,none": 0.028009647070930115,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3352272727272727,
						"acc_norm,none": 0.3352272727272727,
						"acc_norm_stderr,none": 0.03568512682153707,
						"acc_stderr,none": 0.03568512682153707,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.348993288590604,
						"acc_norm,none": 0.348993288590604,
						"acc_norm_stderr,none": 0.0391805395977528,
						"acc_stderr,none": 0.0391805395977528,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261343,
						"acc_stderr,none": 0.04319782230261343,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255953,
						"acc_stderr,none": 0.03492619473255953,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.29927007299270075,
						"acc_norm,none": 0.29927007299270075,
						"acc_norm_stderr,none": 0.02261596114573682,
						"acc_stderr,none": 0.02261596114573682,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.03371498987315741,
						"acc_stderr,none": 0.03371498987315741,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.040849837332392225,
						"acc_stderr,none": 0.040849837332392225,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3524590163934426,
						"acc_norm,none": 0.3524590163934426,
						"acc_norm_stderr,none": 0.0434305428342706,
						"acc_stderr,none": 0.0434305428342706,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.031840067304739414,
						"acc_stderr,none": 0.031840067304739414,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.03508485373860693,
						"acc_stderr,none": 0.03508485373860693,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.328042328042328,
						"acc_norm,none": 0.328042328042328,
						"acc_norm_stderr,none": 0.034241830758536596,
						"acc_stderr,none": 0.034241830758536596,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649574,
						"acc_stderr,none": 0.04083221538649574,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.22872340425531915,
						"acc_norm,none": 0.22872340425531915,
						"acc_norm_stderr,none": 0.02168926054379154,
						"acc_stderr,none": 0.02168926054379154,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.40086206896551724,
						"acc_norm,none": 0.40086206896551724,
						"acc_norm_stderr,none": 0.03224444451161073,
						"acc_stderr,none": 0.03224444451161073,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3160919540229885,
						"acc_norm,none": 0.3160919540229885,
						"acc_norm_stderr,none": 0.035349438976908586,
						"acc_stderr,none": 0.035349438976908586,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.031175070714705388,
						"acc_stderr,none": 0.031175070714705388,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268048,
						"acc_stderr,none": 0.03567969772268048,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676527,
						"acc_stderr,none": 0.03597530251676527,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.37888198757763975,
						"acc_norm,none": 0.37888198757763975,
						"acc_norm_stderr,none": 0.03835120818393935,
						"acc_stderr,none": 0.03835120818393935,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.025208083291660098,
						"mcc_stderr,none": 0.03319251627775123
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0348735088019777,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5341531681753215,
						"acc_stderr,none": 0.008287928900304097,
						"alias": "glue",
						"f1,none": 0.6490161163942175,
						"f1_stderr,none": 0.00027659914764989804,
						"mcc,none": 0.025208083291660098,
						"mcc_stderr,none": 0.03319251627775123
					},
					"hellaswag": {
						"acc,none": 0.5310695080661223,
						"acc_norm,none": 0.7106154152559251,
						"acc_norm_stderr,none": 0.004525499540017863,
						"acc_stderr,none": 0.00498013867916104,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7145352222006598,
						"acc_stderr,none": 0.021690167525119226,
						"alias": "lambada",
						"perplexity,none": 3.7498847633182866,
						"perplexity_stderr,none": 0.29742689720133125
					},
					"lambada_multilingual": {
						"acc,none": 0.541820298855036,
						"acc_stderr,none": 0.08910457244092326,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.940024678125603,
						"perplexity_stderr,none": 8.851802987642479
					},
					"lambada_openai": {
						"acc,none": 0.7562584901998836,
						"acc_stderr,none": 0.0059815254233217575,
						"alias": " - lambada_openai",
						"perplexity,none": 3.177009274087739,
						"perplexity_stderr,none": 0.06327343575776845
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4261595187269552,
						"acc_stderr,none": 0.006889596071653633,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.12066660228794,
						"perplexity_stderr,none": 2.0884425979663916
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7562584901998836,
						"acc_stderr,none": 0.005981525423321757,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1771910762738482,
						"perplexity_stderr,none": 0.06327635623038164
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.44362507277314184,
						"acc_stderr,none": 0.006921558436638474,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 31.583466617673086,
						"perplexity_stderr,none": 1.6203065015164873
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5565689889384824,
						"acc_stderr,none": 0.006921251108304396,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.550879588884072,
						"perplexity_stderr,none": 0.830624815054358
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5264894236367165,
						"acc_stderr,none": 0.006956194880237216,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.267919505509067,
						"perplexity_stderr,none": 1.2341173619181574
					},
					"lambada_standard": {
						"acc,none": 0.6730060159130603,
						"acc_stderr,none": 0.0065356897404871194,
						"alias": " - lambada_standard",
						"perplexity,none": 4.322529546532115,
						"perplexity_stderr,none": 0.09458487515713265
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.017420694783393142,
						"acc_stderr,none": 0.016555252497925898,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.05281377760305703,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.0391545063041425,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3092105263157895,
						"acc_stderr,none": 0.03761070869867479,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3849056603773585,
						"acc_stderr,none": 0.02994649856769995,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2986111111111111,
						"acc_stderr,none": 0.03827052357950756,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3179190751445087,
						"acc_stderr,none": 0.0355068398916558,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.34893617021276596,
						"acc_stderr,none": 0.031158522131357773,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.33793103448275863,
						"acc_stderr,none": 0.03941707632064889,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2671957671957672,
						"acc_stderr,none": 0.022789673145776575,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3774193548387097,
						"acc_stderr,none": 0.027575960723278236,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.23645320197044334,
						"acc_stderr,none": 0.029896114291733552,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3212121212121212,
						"acc_stderr,none": 0.03646204963253812,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3787878787878788,
						"acc_stderr,none": 0.03456088731993747,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.42487046632124353,
						"acc_stderr,none": 0.035674713352125395,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.30256410256410254,
						"acc_stderr,none": 0.023290888053772725,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.025928876132766104,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3067226890756303,
						"acc_stderr,none": 0.02995382389188705,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3376146788990826,
						"acc_stderr,none": 0.020275265986638917,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19907407407407407,
						"acc_stderr,none": 0.027232298462690232,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.37745098039215685,
						"acc_stderr,none": 0.03402272044340703,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3080168776371308,
						"acc_stderr,none": 0.0300523893356057,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.33183856502242154,
						"acc_stderr,none": 0.03160295143776679,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3282442748091603,
						"acc_stderr,none": 0.04118438565806298,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.30669500531349625,
						"acc_stderr,none": 0.046684012108880915,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.04294340845212093,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3067484662576687,
						"acc_stderr,none": 0.03623089915724147,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467764,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.36893203883495146,
						"acc_stderr,none": 0.047776151811567386,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.39316239316239315,
						"acc_stderr,none": 0.03199957924651047,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4125159642401022,
						"acc_stderr,none": 0.01760414910867193,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.33815028901734107,
						"acc_stderr,none": 0.02546977014940017,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.32679738562091504,
						"acc_stderr,none": 0.026857294663281413,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.35629224332153203,
						"acc_stderr,none": 0.042403440033874494,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.42765273311897106,
						"acc_stderr,none": 0.028099240775809567,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.02640614597362566,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.02657786094330786,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2848761408083442,
						"acc_stderr,none": 0.011527830846369,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3382352941176471,
						"acc_stderr,none": 0.028739328513983572,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.315359477124183,
						"acc_stderr,none": 0.018798086284886877,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.37272727272727274,
						"acc_stderr,none": 0.04631381319425465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.37551020408163266,
						"acc_stderr,none": 0.031001209039894836,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3457913552161196,
						"acc_stderr,none": 0.046105796618286045,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.42786069651741293,
						"acc_stderr,none": 0.03498541988407795,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2759276879162703,
						"acc_stderr,none": 0.0558237068265109,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.0355092018568963,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.39766081871345027,
						"acc_stderr,none": 0.03753638955761691,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.38166072338257767,
						"acc_stderr,none": 0.0049037590710875704,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37093165174938975,
						"acc_stderr,none": 0.0048718848587678085,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7156862745098039,
						"acc_stderr,none": 0.022359549679883524,
						"alias": " - mrpc",
						"f1,none": 0.8263473053892215,
						"f1_stderr,none": 0.015858028047186716
					},
					"openbookqa": {
						"acc,none": 0.314,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020776701920308997,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4215,
						"acc_stderr,none": 0.011044449507896285,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.010767797952059087,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.010957190790298969,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003717,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.011176670299310673,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.011182778154985876,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4632857142857143,
						"acc_stderr,none": 0.05254008462814893,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.76550598476605,
						"acc_norm,none": 0.7714907508161044,
						"acc_norm_stderr,none": 0.00979631351182952,
						"acc_stderr,none": 0.009885203143240547,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7404627613880729,
						"acc_norm,none": 0.637063884816814,
						"acc_norm_stderr,none": 0.009838673458380291,
						"acc_stderr,none": 0.1375167773496648,
						"alias": "pythia",
						"bits_per_byte,none": 0.6418735113773659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5603541499377176,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.177009274087739,
						"perplexity_stderr,none": 0.06327343575776845,
						"word_perplexity,none": 10.79571520647912,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4958813838550247,
						"acc_stderr,none": 0.006765181024578747,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6058372495671531,
						"acc_stderr,none": 0.0024303525434540333,
						"alias": " - qqp",
						"f1,none": 0.6474804229527054,
						"f1_stderr,none": 0.00261305947149439
					},
					"record": {
						"alias": "record",
						"em,none": 0.2773,
						"em_stderr,none": 0.00447688231334351,
						"f1,none": 0.28653190499246123,
						"f1_stderr,none": 0.004486110423212122
					},
					"rte": {
						"acc,none": 0.5776173285198556,
						"acc_stderr,none": 0.029731622646495887,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.946,
						"acc_norm,none": 0.927,
						"acc_norm_stderr,none": 0.00823035471524407,
						"acc_stderr,none": 0.007150883521295442,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9174311926605505,
						"acc_stderr,none": 0.009325791021628796,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.31572710811905147,
						"acc_stderr,none": 0.0014275466800908211,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2913096695226438,
						"bleu_acc_stderr,none": 0.01590598704818483,
						"bleu_diff,none": -7.015492879644246,
						"bleu_diff_stderr,none": 0.8202522569573135,
						"bleu_max,none": 24.925938188081272,
						"bleu_max_stderr,none": 0.8124607538157117,
						"rouge1_acc,none": 0.25703794369645044,
						"rouge1_acc_stderr,none": 0.015298077509485088,
						"rouge1_diff,none": -9.107386123211176,
						"rouge1_diff_stderr,none": 0.8795274105011827,
						"rouge1_max,none": 48.00517855309214,
						"rouge1_max_stderr,none": 0.9701878631298597,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080501,
						"rouge2_diff,none": -11.027787199859466,
						"rouge2_diff_stderr,none": 1.053755183481189,
						"rouge2_max,none": 33.18100281561311,
						"rouge2_max_stderr,none": 1.0285638555587886,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.015321821688476178,
						"rougeL_diff,none": -9.392047504042909,
						"rougeL_diff_stderr,none": 0.896324737751785,
						"rougeL_max,none": 45.370497664448095,
						"rougeL_max_stderr,none": 0.9724912658947464
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2913096695226438,
						"bleu_acc_stderr,none": 0.01590598704818483,
						"bleu_diff,none": -7.015492879644246,
						"bleu_diff_stderr,none": 0.8202522569573135,
						"bleu_max,none": 24.925938188081272,
						"bleu_max_stderr,none": 0.8124607538157117,
						"rouge1_acc,none": 0.25703794369645044,
						"rouge1_acc_stderr,none": 0.015298077509485088,
						"rouge1_diff,none": -9.107386123211176,
						"rouge1_diff_stderr,none": 0.8795274105011827,
						"rouge1_max,none": 48.00517855309214,
						"rouge1_max_stderr,none": 0.9701878631298597,
						"rouge2_acc,none": 0.23255813953488372,
						"rouge2_acc_stderr,none": 0.014789157531080501,
						"rouge2_diff,none": -11.027787199859466,
						"rouge2_diff_stderr,none": 1.053755183481189,
						"rouge2_max,none": 33.18100281561311,
						"rouge2_max_stderr,none": 1.0285638555587886,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.015321821688476178,
						"rougeL_diff,none": -9.392047504042909,
						"rougeL_diff_stderr,none": 0.896324737751785,
						"rougeL_max,none": 45.370497664448095,
						"rougeL_max_stderr,none": 0.9724912658947464
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2460220318237454,
						"acc_stderr,none": 0.015077219200662588,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3854321844143575,
						"acc_stderr,none": 0.014040805913520171,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6418735113773659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5603541499377176,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.79571520647912,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.675611681136543,
						"acc_stderr,none": 0.013157225726641637,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.38461538461538464,
						"acc_stderr,none": 0.047936688680750406,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6154545454545455,
						"acc_stderr,none": 0.07026633916338663,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.02203367799374087,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290785,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.0197328855859221,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.02217510926561316,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.022175109265613165,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.02153917063731769,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.020776701920308997,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43167336010709506,
						"acc_stderr,none": 0.055157505641048286,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337343,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4670682730923695,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38032128514056224,
						"acc_stderr,none": 0.009730746464767608,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5381526104417671,
						"acc_stderr,none": 0.009992853579749944,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4947791164658635,
						"acc_stderr,none": 0.010021526496530351,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4931726907630522,
						"acc_stderr,none": 0.010021138522919167,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099366,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438308,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3534136546184739,
						"acc_stderr,none": 0.009581698005070976,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.363855421686747,
						"acc_stderr,none": 0.009643393577626719,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4682730923694779,
						"acc_stderr,none": 0.0100018761464667,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39518072289156625,
						"acc_stderr,none": 0.009799371892746737,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42891566265060244,
						"acc_stderr,none": 0.009920273121045582,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.36224899598393573,
						"acc_stderr,none": 0.009634223618009008,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6172913783767523,
						"acc_stderr,none": 0.06939720515724919,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5896757114493713,
						"acc_stderr,none": 0.012658485800663387,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7822634017207147,
						"acc_stderr,none": 0.010620714860047856,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7127729980145598,
						"acc_stderr,none": 0.01164393516114786,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.01280441272012667,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5804103242885507,
						"acc_stderr,none": 0.012699642268200759,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6565188616810059,
						"acc_stderr,none": 0.01222043251361922,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.499669093315685,
						"acc_stderr,none": 0.012867122498493415,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6783587028457975,
						"acc_stderr,none": 0.012020627225185132,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5446724023825281,
						"acc_stderr,none": 0.012815666542067282,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207789,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6412971542025149,
						"acc_stderr,none": 0.012342655113112376,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7999550460777702,
						"acc_stderr,none": 0.03675907681957291,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8606451612903225,
						"acc_stderr,none": 0.0071838131908631645,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7340980187695516,
						"acc_stderr,none": 0.01427430782530696,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7642585551330798,
						"acc_stderr,none": 0.026223308206222522,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.026473487980890983,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7599206349206349,
						"acc_stderr,none": 0.019044849417856065,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/bruber_9b"
	},
	"./rwkv-x-dev/chunk0-0_8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6200676437429538,
						"acc_norm,none": 0.6073844419391207,
						"acc_norm_stderr,none": 0.08936073016126243,
						"acc_stderr,none": 0.11041333155072695,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3565625,
						"acc_stderr,none": 0.018017580022735433,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8245223880597015,
						"acc_stderr,none": 0.1707629083868832,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.26774304955966155,
						"acc_norm,none": 0.26774304955966155,
						"acc_norm_stderr,none": 0.043587108171375746,
						"acc_stderr,none": 0.043587108171375746,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5245057170080991,
						"acc_stderr,none": 0.010624599636447029,
						"alias": "glue",
						"f1,none": 0.6491011620815851,
						"f1_stderr,none": 0.00032108186827200434,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7109450805356103,
						"acc_stderr,none": 0.017702939922475264,
						"alias": "lambada",
						"perplexity,none": 3.876580206940784,
						"perplexity_stderr,none": 0.2428651059071946
					},
					"lambada_multilingual": {
						"acc,none": 0.5289734135455074,
						"acc_stderr,none": 0.08329939113702933,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.680879254928186,
						"perplexity_stderr,none": 8.791298890806734
					},
					"mmlu": {
						"acc,none": 0.3124910981341689,
						"acc_stderr,none": 0.05895696162239771,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.29734325185972377,
						"acc_stderr,none": 0.05539871215244521,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.35693595107821047,
						"acc_stderr,none": 0.04488848662286952,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3282417939551511,
						"acc_stderr,none": 0.05593572854883733,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2759276879162702,
						"acc_stderr,none": 0.06176008065466927,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4807142857142857,
						"acc_stderr,none": 0.05275166826504779,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7299766179422126,
						"acc_norm,none": 0.611828937448934,
						"acc_norm_stderr,none": 0.010060762980937102,
						"acc_stderr,none": 0.1590104789971228,
						"alias": "pythia",
						"bits_per_byte,none": 0.6358205792814908,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5538212846214599,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4196024331302066,
						"perplexity_stderr,none": 0.06747647957277843,
						"word_perplexity,none": 10.556203501453586,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3181906504722058,
						"acc_stderr,none": 0.001512062510265972,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.01630598864892062,
						"bleu_diff,none": -7.318250650318278,
						"bleu_diff_stderr,none": 0.8891580932461726,
						"bleu_max,none": 27.23746358054591,
						"bleu_max_stderr,none": 0.809736633362668,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.015702107090627897,
						"rouge1_diff,none": -10.068802225564218,
						"rouge1_diff_stderr,none": 0.980821073542755,
						"rouge1_max,none": 51.94513875554285,
						"rouge1_max_stderr,none": 0.8975919278631865,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707694,
						"rouge2_diff,none": -12.067446550541952,
						"rouge2_diff_stderr,none": 1.1628228143432835,
						"rouge2_max,none": 35.78404244962929,
						"rouge2_max_stderr,none": 1.0543486082648192,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -10.256601078328625,
						"rougeL_diff_stderr,none": 0.9896808491563573,
						"rougeL_max,none": 49.22924469071218,
						"rougeL_max_stderr,none": 0.9162320195331894
					},
					"xcopa": {
						"acc,none": 0.6156363636363636,
						"acc_stderr,none": 0.07188443941334852,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43820615796519413,
						"acc_stderr,none": 0.05038433417312249,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6252331388003128,
						"acc_stderr,none": 0.051720807244359346,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8136659923578332,
						"acc_stderr,none": 0.04622777589835031,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6200676437429538,
						"acc_norm,none": 0.6073844419391207,
						"acc_norm_stderr,none": 0.08936073016126243,
						"acc_stderr,none": 0.11041333155072695,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3565625,
						"acc_stderr,none": 0.018017580022735433,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779859,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407559,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3383333333333333,
						"acc_stderr,none": 0.013664144006618268,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.386518771331058,
						"acc_norm,none": 0.4189419795221843,
						"acc_norm_stderr,none": 0.01441810695363901,
						"acc_stderr,none": 0.014230084761910483,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7352693602693603,
						"acc_norm,none": 0.7003367003367004,
						"acc_norm_stderr,none": 0.009400228586205968,
						"acc_stderr,none": 0.009053021086173965,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8245223880597015,
						"acc_stderr,none": 0.1707629083868832,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987277,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557821,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183561,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592076,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.01337497251922005,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.01562059547530132,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179501,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346938,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565617,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140928,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061145,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.00612507277642612,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745895,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280312,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.00453647215130651,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345672,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220465,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490528,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577938,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323488,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409305,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319425,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.014205696104091501,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662742,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.01248626873437014,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614748,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248106,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315141,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.715,
						"acc_stderr,none": 0.014282120955200482,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929341002,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798594,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.643,
						"acc_stderr,none": 0.01515852172148677,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.015594460144140601,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140914,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509008,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695806,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662742,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804474,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697056,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000072,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139959,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504418,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.015813097547730987,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033825,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037197,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036433,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.01444273494157502,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262426,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397243,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584932,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620667,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904616,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843983,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557421,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745911,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036277,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.0050348137353182585,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.01523177622626491,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.315,
						"acc_stderr,none": 0.014696631960792505,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.26774304955966155,
						"acc_norm,none": 0.26774304955966155,
						"acc_norm_stderr,none": 0.043587108171375746,
						"acc_stderr,none": 0.043587108171375746,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03662869876642904,
						"acc_stderr,none": 0.03662869876642904,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.037499999999999964,
						"acc_stderr,none": 0.037499999999999964,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.030880282749398025,
						"acc_stderr,none": 0.030880282749398025,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.2375,
						"acc_norm_stderr,none": 0.03374839851779222,
						"acc_stderr,none": 0.03374839851779222,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2426470588235294,
						"acc_norm,none": 0.2426470588235294,
						"acc_norm_stderr,none": 0.036895193269968055,
						"acc_stderr,none": 0.036895193269968055,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.024539600216850282,
						"acc_stderr,none": 0.024539600216850282,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604243,
						"acc_stderr,none": 0.030587591351604243,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036427,
						"acc_stderr,none": 0.027985699387036427,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.18867924528301888,
						"acc_norm,none": 0.18867924528301888,
						"acc_norm_stderr,none": 0.0381824426969915,
						"acc_stderr,none": 0.0381824426969915,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.04619693596622581,
						"acc_stderr,none": 0.04619693596622581,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529025,
						"acc_stderr,none": 0.04651841326529025,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.02635072265556439,
						"acc_stderr,none": 0.02635072265556439,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604243,
						"acc_stderr,none": 0.030587591351604243,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.037940071215336206,
						"acc_stderr,none": 0.037940071215336206,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27044025157232704,
						"acc_norm,none": 0.27044025157232704,
						"acc_norm_stderr,none": 0.035337641019122276,
						"acc_stderr,none": 0.035337641019122276,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3006134969325153,
						"acc_norm,none": 0.3006134969325153,
						"acc_norm_stderr,none": 0.0360251131880677,
						"acc_stderr,none": 0.0360251131880677,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.032583750685258935,
						"acc_stderr,none": 0.032583750685258935,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.3134920634920635,
						"acc_norm,none": 0.3134920634920635,
						"acc_norm_stderr,none": 0.02928189612568394,
						"acc_stderr,none": 0.02928189612568394,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.02985751567338641,
						"acc_stderr,none": 0.02985751567338641,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3403361344537815,
						"acc_norm,none": 0.3403361344537815,
						"acc_norm_stderr,none": 0.030778057422931673,
						"acc_stderr,none": 0.030778057422931673,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066654,
						"acc_stderr,none": 0.03785714465066654,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.037068604626235596,
						"acc_stderr,none": 0.037068604626235596,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594346,
						"acc_stderr,none": 0.03518627932594346,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101963,
						"acc_stderr,none": 0.03334150198101963,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.03895091015724136,
						"acc_stderr,none": 0.03895091015724136,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.03852273364924318,
						"acc_stderr,none": 0.03852273364924318,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745176,
						"acc_stderr,none": 0.021801329069745176,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3037383177570093,
						"acc_norm,none": 0.3037383177570093,
						"acc_norm_stderr,none": 0.03150984286811783,
						"acc_stderr,none": 0.03150984286811783,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208565,
						"acc_stderr,none": 0.04119323030208565,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2714285714285714,
						"acc_norm,none": 0.2714285714285714,
						"acc_norm_stderr,none": 0.030760309824226055,
						"acc_stderr,none": 0.030760309824226055,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184865,
						"acc_stderr,none": 0.03363635410184865,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495764,
						"acc_stderr,none": 0.040832215386495764,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.033133343292217204,
						"acc_stderr,none": 0.033133343292217204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776725,
						"acc_stderr,none": 0.03011304016776725,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.029115639308759614,
						"acc_stderr,none": 0.029115639308759614,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3045977011494253,
						"acc_norm,none": 0.3045977011494253,
						"acc_norm_stderr,none": 0.03499115838809175,
						"acc_stderr,none": 0.03499115838809175,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2831858407079646,
						"acc_norm,none": 0.2831858407079646,
						"acc_norm_stderr,none": 0.030036394245092295,
						"acc_stderr,none": 0.030036394245092295,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.035014387062967806,
						"acc_stderr,none": 0.035014387062967806,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676975,
						"acc_stderr,none": 0.03410167836676975,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.033799766898963086,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5245057170080991,
						"acc_stderr,none": 0.010624599636447029,
						"alias": "glue",
						"f1,none": 0.6491011620815851,
						"f1_stderr,none": 0.00032108186827200434,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.519717187811193,
						"acc_norm,none": 0.7048396733718383,
						"acc_norm_stderr,none": 0.004551826272978058,
						"acc_stderr,none": 0.004985900172317698,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7109450805356103,
						"acc_stderr,none": 0.017702939922475264,
						"alias": "lambada",
						"perplexity,none": 3.876580206940784,
						"perplexity_stderr,none": 0.2428651059071946
					},
					"lambada_multilingual": {
						"acc,none": 0.5289734135455074,
						"acc_stderr,none": 0.08329939113702933,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.680879254928186,
						"perplexity_stderr,none": 8.791298890806734
					},
					"lambada_openai": {
						"acc,none": 0.7442266640791771,
						"acc_stderr,none": 0.006078442596011083,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4196024331302066,
						"perplexity_stderr,none": 0.06747647957277843
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4145158160294974,
						"acc_stderr,none": 0.006863414211397147,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 37.86535447337028,
						"perplexity_stderr,none": 2.1014369172734346
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7438385406559286,
						"acc_stderr,none": 0.006081466315674261,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.419331503965391,
						"perplexity_stderr,none": 0.06748888698297716
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.44964098583349504,
						"acc_stderr,none": 0.006930555736225027,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.32399464308149,
						"perplexity_stderr,none": 1.4782065624636873
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5379390646225499,
						"acc_stderr,none": 0.006945895434579802,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.95460992838133,
						"perplexity_stderr,none": 0.8745644704053318
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4989326605860664,
						"acc_stderr,none": 0.006965961785703062,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.84110572584246,
						"perplexity_stderr,none": 1.2667454061998853
					},
					"lambada_standard": {
						"acc,none": 0.6778575587036678,
						"acc_stderr,none": 0.006510363942739272,
						"alias": " - lambada_standard",
						"perplexity,none": 4.3341068135275815,
						"perplexity_stderr,none": 0.09348149235782736
					},
					"logiqa": {
						"acc,none": 0.24731182795698925,
						"acc_norm,none": 0.27342549923195086,
						"acc_norm_stderr,none": 0.01748247454768128,
						"acc_stderr,none": 0.016922842446712386,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3124910981341689,
						"acc_stderr,none": 0.05895696162239771,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036624,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.03690677986137283,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.029890609686286623,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.04023382273617746,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610337,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518752,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.03695183311650232,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.22486772486772486,
						"acc_stderr,none": 0.02150209607822914,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848876,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.36129032258064514,
						"acc_stderr,none": 0.027327548447957546,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3878787878787879,
						"acc_stderr,none": 0.03804913653971011,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.03502975799413007,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.43523316062176165,
						"acc_stderr,none": 0.03578038165008585,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.02323458108842849,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712163,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.02907937453948001,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.344954128440367,
						"acc_stderr,none": 0.020380605405066955,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.02541642838876747,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.45588235294117646,
						"acc_stderr,none": 0.03495624522015474,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.030685820596610812,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3542600896860987,
						"acc_stderr,none": 0.03210062154134987,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.35877862595419846,
						"acc_stderr,none": 0.04206739313864908,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.29734325185972377,
						"acc_stderr,none": 0.05539871215244521,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.30578512396694213,
						"acc_stderr,none": 0.04205953933884122,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615623,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.042878587513404565,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4174757281553398,
						"acc_stderr,none": 0.048828405482122375,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.36324786324786323,
						"acc_stderr,none": 0.03150712523091264,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3933588761174968,
						"acc_stderr,none": 0.017468556724503172,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.29190751445086704,
						"acc_stderr,none": 0.024476994076247316,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574898,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.026090162504279046,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.35693595107821047,
						"acc_stderr,none": 0.04488848662286952,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3279742765273312,
						"acc_stderr,none": 0.02666441088693761,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.026406145973625658,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.27053455019556716,
						"acc_stderr,none": 0.011345996743539253,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.36764705882352944,
						"acc_stderr,none": 0.029289413409403192,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.01849259653639695,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.41818181818181815,
						"acc_stderr,none": 0.0472457740573157,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.23673469387755103,
						"acc_stderr,none": 0.027212835884073153,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3282417939551511,
						"acc_stderr,none": 0.05593572854883733,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.40298507462686567,
						"acc_stderr,none": 0.03468343295111126,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2759276879162702,
						"acc_stderr,none": 0.06176008065466927,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.42105263157894735,
						"acc_stderr,none": 0.03786720706234214,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.33958227203260316,
						"acc_stderr,none": 0.004780340579713731,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34530105777054515,
						"acc_stderr,none": 0.004795356793592588,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.840625,
						"f1_stderr,none": 0.01558715928894479
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.0664819944598338,
						"exact_match_stderr,remove_whitespace": 0.004146865296902489
					},
					"openbookqa": {
						"acc,none": 0.296,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.022000910893877196,
						"acc_stderr,none": 0.020435342091896142,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4355,
						"acc_stderr,none": 0.011089696374691104,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.010828024891988879,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4285,
						"acc_stderr,none": 0.011068203447885417,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.011180429374603772,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5125,
						"acc_stderr,none": 0.011179640744835738,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4807142857142857,
						"acc_stderr,none": 0.05275166826504779,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7622415669205659,
						"acc_norm,none": 0.7720348204570185,
						"acc_norm_stderr,none": 0.00978809383232491,
						"acc_stderr,none": 0.009932525779525483,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7299766179422126,
						"acc_norm,none": 0.611828937448934,
						"acc_norm_stderr,none": 0.010060762980937102,
						"acc_stderr,none": 0.1590104789971228,
						"alias": "pythia",
						"bits_per_byte,none": 0.6358205792814908,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5538212846214599,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4196024331302066,
						"perplexity_stderr,none": 0.06747647957277843,
						"word_perplexity,none": 10.556203501453586,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49789492952590153,
						"acc_stderr,none": 0.006765350592089551,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6056641108088053,
						"acc_stderr,none": 0.002430538880214674,
						"alias": " - qqp",
						"f1,none": 0.6474425598726256,
						"f1_stderr,none": 0.002611968306414811
					},
					"record": {
						"alias": "record",
						"em,none": 0.2746,
						"em_stderr,none": 0.004463348087211242,
						"f1,none": 0.2843119049996138,
						"f1_stderr,none": 0.00447246783111216
					},
					"rte": {
						"acc,none": 0.6570397111913358,
						"acc_stderr,none": 0.02857348326765378,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.942,
						"acc_norm,none": 0.92,
						"acc_norm_stderr,none": 0.00858333697775365,
						"acc_stderr,none": 0.007395315455792949,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8876146788990825,
						"acc_stderr,none": 0.010701827730093276,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3181906504722058,
						"acc_stderr,none": 0.001512062510265972,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.01630598864892062,
						"bleu_diff,none": -7.318250650318278,
						"bleu_diff_stderr,none": 0.8891580932461726,
						"bleu_max,none": 27.23746358054591,
						"bleu_max_stderr,none": 0.809736633362668,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.015702107090627897,
						"rouge1_diff,none": -10.068802225564218,
						"rouge1_diff_stderr,none": 0.980821073542755,
						"rouge1_max,none": 51.94513875554285,
						"rouge1_max_stderr,none": 0.8975919278631865,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707694,
						"rouge2_diff,none": -12.067446550541952,
						"rouge2_diff_stderr,none": 1.1628228143432835,
						"rouge2_max,none": 35.78404244962929,
						"rouge2_max_stderr,none": 1.0543486082648192,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -10.256601078328625,
						"rougeL_diff_stderr,none": 0.9896808491563573,
						"rougeL_max,none": 49.22924469071218,
						"rougeL_max_stderr,none": 0.9162320195331894
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.01630598864892062,
						"bleu_diff,none": -7.318250650318278,
						"bleu_diff_stderr,none": 0.8891580932461726,
						"bleu_max,none": 27.23746358054591,
						"bleu_max_stderr,none": 0.809736633362668,
						"rouge1_acc,none": 0.27906976744186046,
						"rouge1_acc_stderr,none": 0.015702107090627897,
						"rouge1_diff,none": -10.068802225564218,
						"rouge1_diff_stderr,none": 0.980821073542755,
						"rouge1_max,none": 51.94513875554285,
						"rouge1_max_stderr,none": 0.8975919278631865,
						"rouge2_acc,none": 0.25091799265605874,
						"rouge2_acc_stderr,none": 0.015176985027707694,
						"rouge2_diff,none": -12.067446550541952,
						"rouge2_diff_stderr,none": 1.1628228143432835,
						"rouge2_max,none": 35.78404244962929,
						"rouge2_max_stderr,none": 1.0543486082648192,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -10.256601078328625,
						"rougeL_diff_stderr,none": 0.9896808491563573,
						"rougeL_max,none": 49.22924469071218,
						"rougeL_max_stderr,none": 0.9162320195331894
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2460220318237454,
						"acc_stderr,none": 0.015077219200662588,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39035926912066615,
						"acc_stderr,none": 0.013831463210732819,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6358205792814908,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5538212846214599,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.556203501453586,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6669297553275454,
						"acc_stderr,none": 0.013246194028070656,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.048346889526540184,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6156363636363636,
						"acc_stderr,none": 0.07188443941334852,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920787,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.019332342821239103,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.02161328916516578,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896142,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.020950557312477455,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43820615796519413,
						"acc_stderr,none": 0.05038433417312249,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.00944890091461761,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4670682730923695,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631358,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3923694779116466,
						"acc_stderr,none": 0.009787120838990108,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5293172690763053,
						"acc_stderr,none": 0.01000483004554399,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5008032128514056,
						"acc_stderr,none": 0.010022059935722385,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5004016064257029,
						"acc_stderr,none": 0.010022069634353856,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43172690763052207,
						"acc_stderr,none": 0.00992820318611292,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39196787148594375,
						"acc_stderr,none": 0.009785342947722884,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42208835341365464,
						"acc_stderr,none": 0.00989965271489542,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44457831325301206,
						"acc_stderr,none": 0.009960315726344817,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41445783132530123,
						"acc_stderr,none": 0.009874311310483538,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164683,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301817,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6252331388003128,
						"acc_stderr,none": 0.051720807244359346,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.01263942942038987,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.771012574454004,
						"acc_stderr,none": 0.010813046586508217,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.700860357379219,
						"acc_stderr,none": 0.011783227411626324,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.01259874393825287,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6604897418927862,
						"acc_stderr,none": 0.012186276146659444,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.012829804720321707,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6790205162144275,
						"acc_stderr,none": 0.012014110213469808,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.557246856386499,
						"acc_stderr,none": 0.012782510750319241,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389868,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6234281932495036,
						"acc_stderr,none": 0.012468914489659354,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8136659923578332,
						"acc_stderr,none": 0.04622777589835031,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8718279569892473,
						"acc_stderr,none": 0.006934162057729848,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783091,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7413972888425443,
						"acc_stderr,none": 0.014146834702050056,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8022813688212928,
						"acc_stderr,none": 0.024605744229700216,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6698412698412698,
						"acc_stderr,none": 0.026538875646287704,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7936507936507936,
						"acc_stderr,none": 0.01804397166082725,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk0-0_8_pth"
	},
	"./rwkv-x-dev/chunk1-0_8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6192220969560316,
						"acc_norm,none": 0.605129650507328,
						"acc_norm_stderr,none": 0.08791329203631255,
						"acc_stderr,none": 0.11081604995729714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.018025860722264586,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8190746268656717,
						"acc_stderr,none": 0.17422323443357235,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.27033327577275096,
						"acc_norm,none": 0.27033327577275096,
						"acc_norm_stderr,none": 0.04215696740688232,
						"acc_stderr,none": 0.04215696740688232,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5345467441661738,
						"acc_stderr,none": 0.10624937468295627,
						"alias": "glue",
						"f1,none": 0.6564317301662976,
						"f1_stderr,none": 0.00026848166439554384,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7110421113914225,
						"acc_stderr,none": 0.018475299956026547,
						"alias": "lambada",
						"perplexity,none": 3.8337168896553884,
						"perplexity_stderr,none": 0.2507223958932869
					},
					"lambada_multilingual": {
						"acc,none": 0.5316514651659227,
						"acc_stderr,none": 0.08833419895282758,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.99826677989173,
						"perplexity_stderr,none": 8.741617958719598
					},
					"mmlu": {
						"acc,none": 0.3132744623273038,
						"acc_stderr,none": 0.05630047358730123,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.29373007438894794,
						"acc_stderr,none": 0.048179276673558105,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749598,
						"acc_stderr,none": 0.048761595196255045,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.322391940201495,
						"acc_stderr,none": 0.05185942323127035,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2860767522993974,
						"acc_stderr,none": 0.061592210035896496,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47664285714285715,
						"acc_stderr,none": 0.05453530538901475,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7264457658583788,
						"acc_norm,none": 0.6096955483688431,
						"acc_norm_stderr,none": 0.009863339012311572,
						"acc_stderr,none": 0.1611300511276991,
						"alias": "pythia",
						"bits_per_byte,none": 0.6347183184916613,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5526345735618807,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3591197609191825,
						"perplexity_stderr,none": 0.06591618426752378,
						"word_perplexity,none": 10.513162984211142,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.31727303757631314,
						"acc_stderr,none": 0.001436576041308098,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.6589032057097075,
						"bleu_diff_stderr,none": 0.8875152415325809,
						"bleu_max,none": 27.253690883024817,
						"bleu_max_stderr,none": 0.8195684347928635,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777305,
						"rouge1_diff,none": -10.897300394194946,
						"rouge1_diff_stderr,none": 0.9616333403253924,
						"rouge1_max,none": 51.55579931889928,
						"rouge1_max_stderr,none": 0.903962328064303,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -12.62947792141572,
						"rouge2_diff_stderr,none": 1.1554150122828406,
						"rouge2_max,none": 35.49189322460967,
						"rouge2_max_stderr,none": 1.0560692284703668,
						"rougeL_acc,none": 0.28886168910648713,
						"rougeL_acc_stderr,none": 0.01586634640138431,
						"rougeL_diff,none": -11.158520185214215,
						"rougeL_diff_stderr,none": 0.9763851322080774,
						"rougeL_max,none": 48.717280984399075,
						"rougeL_max_stderr,none": 0.9256556806666052
					},
					"xcopa": {
						"acc,none": 0.6181818181818182,
						"acc_stderr,none": 0.06845837772697577,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4398661311914324,
						"acc_stderr,none": 0.05287857648054458,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6276998977197521,
						"acc_stderr,none": 0.05219078517475863,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.811643065857496,
						"acc_stderr,none": 0.04785618560441878,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6192220969560316,
						"acc_norm,none": 0.605129650507328,
						"acc_norm_stderr,none": 0.08791329203631255,
						"acc_stderr,none": 0.11081604995729714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.018025860722264586,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779864,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316407,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.013680495725767792,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3848122866894198,
						"acc_norm,none": 0.4197952218430034,
						"acc_norm_stderr,none": 0.014422181226303024,
						"acc_stderr,none": 0.014218371065251117,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7348484848484849,
						"acc_norm,none": 0.6965488215488216,
						"acc_norm_stderr,none": 0.009433837434252268,
						"acc_stderr,none": 0.009057621139172611,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8190746268656717,
						"acc_stderr,none": 0.17422323443357235,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491118,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346936,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731992,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695792,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220051,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.563,
						"acc_stderr,none": 0.015693223928730377,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.01258369378796812,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.01060525678479659,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403644,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246443,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380708,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389627,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996662,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474911,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306512,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381786,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852634,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.0136816002787023,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792951,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.01094526376104296,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910648,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.262,
						"acc_stderr,none": 0.013912208651021355,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024961,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319322,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858918,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937587,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315155,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703718,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787743,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175122,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766276,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405747,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493379,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333337,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784374,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345686,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662732,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425672,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.0070881056172464405,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000072,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491099,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0109781838443578,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163046,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796587,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256565,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434946,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.015766025737882158,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.00953361892934099,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.00573383613969548,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756984,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381788,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024393,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496501,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.0076870078762864115,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523727,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030036,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656803,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.015325105508898132,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.306,
						"acc_stderr,none": 0.014580006055436969,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.27033327577275096,
						"acc_norm,none": 0.27033327577275096,
						"acc_norm_stderr,none": 0.04215696740688232,
						"acc_stderr,none": 0.04215696740688232,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.33125,
						"acc_norm,none": 0.33125,
						"acc_norm_stderr,none": 0.037325985139935236,
						"acc_stderr,none": 0.037325985139935236,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.27751196172248804,
						"acc_norm,none": 0.27751196172248804,
						"acc_norm_stderr,none": 0.03104734851984327,
						"acc_stderr,none": 0.03104734851984327,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.039153454088478354,
						"acc_stderr,none": 0.039153454088478354,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.03861882389311728,
						"acc_stderr,none": 0.03861882389311728,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2755417956656347,
						"acc_norm,none": 0.2755417956656347,
						"acc_norm_stderr,none": 0.02489845928700082,
						"acc_stderr,none": 0.02489845928700082,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647553,
						"acc_stderr,none": 0.03182231867647553,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2681564245810056,
						"acc_norm,none": 0.2681564245810056,
						"acc_norm_stderr,none": 0.033204216306737144,
						"acc_stderr,none": 0.033204216306737144,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036416,
						"acc_stderr,none": 0.027985699387036416,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.03743138631255277,
						"acc_stderr,none": 0.03743138631255277,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.046746602211107734,
						"acc_stderr,none": 0.046746602211107734,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980981,
						"acc_stderr,none": 0.03957835471980981,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.043362909039199406,
						"acc_stderr,none": 0.043362909039199406,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564394,
						"acc_stderr,none": 0.026350722655564394,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083291,
						"acc_stderr,none": 0.03132179803083291,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209194,
						"acc_stderr,none": 0.03377310252209194,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27044025157232704,
						"acc_norm,none": 0.27044025157232704,
						"acc_norm_stderr,none": 0.03533764101912227,
						"acc_stderr,none": 0.03533764101912227,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.294478527607362,
						"acc_norm,none": 0.294478527607362,
						"acc_norm_stderr,none": 0.03581165790474082,
						"acc_stderr,none": 0.03581165790474082,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.032583750685258935,
						"acc_stderr,none": 0.032583750685258935,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365904,
						"acc_stderr,none": 0.030954055470365904,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.33613445378151263,
						"acc_norm,none": 0.33613445378151263,
						"acc_norm_stderr,none": 0.03068473711513536,
						"acc_stderr,none": 0.03068473711513536,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.02818738529393395,
						"acc_stderr,none": 0.02818738529393395,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335357,
						"acc_stderr,none": 0.03388193526335357,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101963,
						"acc_stderr,none": 0.03334150198101963,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714262,
						"acc_stderr,none": 0.04025566684714262,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.0370686046262356,
						"acc_stderr,none": 0.0370686046262356,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.03852273364924315,
						"acc_stderr,none": 0.03852273364924315,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3130841121495327,
						"acc_norm,none": 0.3130841121495327,
						"acc_norm_stderr,none": 0.03177550735912672,
						"acc_stderr,none": 0.03177550735912672,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.04183273258787623,
						"acc_stderr,none": 0.04183273258787623,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671805,
						"acc_stderr,none": 0.041118866352671805,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.03092739584327577,
						"acc_stderr,none": 0.03092739584327577,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.033477857599386325,
						"acc_stderr,none": 0.033477857599386325,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.0331278320035657,
						"acc_stderr,none": 0.0331278320035657,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495764,
						"acc_stderr,none": 0.040832215386495764,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2482758620689655,
						"acc_norm,none": 0.2482758620689655,
						"acc_norm_stderr,none": 0.03600105692727771,
						"acc_stderr,none": 0.03600105692727771,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.042857142857142844,
						"acc_stderr,none": 0.042857142857142844,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.03464507889884373,
						"acc_stderr,none": 0.03464507889884373,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945198,
						"acc_stderr,none": 0.029927771242945198,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.28879310344827586,
						"acc_norm,none": 0.28879310344827586,
						"acc_norm_stderr,none": 0.029818472937938982,
						"acc_stderr,none": 0.029818472937938982,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.28160919540229884,
						"acc_norm,none": 0.28160919540229884,
						"acc_norm_stderr,none": 0.03419642820708565,
						"acc_stderr,none": 0.03419642820708565,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800254,
						"acc_stderr,none": 0.03885004245800254,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.27876106194690264,
						"acc_norm,none": 0.27876106194690264,
						"acc_norm_stderr,none": 0.029892647352308947,
						"acc_stderr,none": 0.029892647352308947,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511782,
						"acc_stderr,none": 0.03524390844511782,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.03209281645145385,
						"acc_stderr,none": 0.03209281645145385,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.034101678366769764,
						"acc_stderr,none": 0.034101678366769764,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03571428571428569,
						"acc_stderr,none": 0.03571428571428569,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5345467441661738,
						"acc_stderr,none": 0.10624937468295627,
						"alias": "glue",
						"f1,none": 0.6564317301662976,
						"f1_stderr,none": 0.00026848166439554384,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5210117506472814,
						"acc_norm,none": 0.7057359091814379,
						"acc_norm_stderr,none": 0.004547798964126658,
						"acc_stderr,none": 0.00498537355077511,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7110421113914225,
						"acc_stderr,none": 0.018475299956026547,
						"alias": "lambada",
						"perplexity,none": 3.8337168896553884,
						"perplexity_stderr,none": 0.2507223958932869
					},
					"lambada_multilingual": {
						"acc,none": 0.5316514651659227,
						"acc_stderr,none": 0.08833419895282758,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.99826677989173,
						"perplexity_stderr,none": 8.741617958719598
					},
					"lambada_openai": {
						"acc,none": 0.7451969726372987,
						"acc_stderr,none": 0.006070855636710545,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3591197609191825,
						"perplexity_stderr,none": 0.06591618426752378
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4131573840481273,
						"acc_stderr,none": 0.0068601032857639275,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.86145472227436,
						"perplexity_stderr,none": 2.032525523667424
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.746749466330293,
						"acc_stderr,none": 0.006058634002437438,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3570581837482854,
						"perplexity_stderr,none": 0.06581055774657171
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45099941781486513,
						"acc_stderr,none": 0.0069324455308038945,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.47378456459302,
						"perplexity_stderr,none": 1.4355537219510561
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5429846691247817,
						"acc_stderr,none": 0.006940188097931742,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.27619251581423,
						"perplexity_stderr,none": 0.8322583096057345
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5043663885115467,
						"acc_stderr,none": 0.006965712034542295,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.022843913028737,
						"perplexity_stderr,none": 1.2170718018395523
					},
					"lambada_standard": {
						"acc,none": 0.6763050650106734,
						"acc_stderr,none": 0.006518555157810555,
						"alias": " - lambada_standard",
						"perplexity,none": 4.308770090126191,
						"perplexity_stderr,none": 0.09235827313161507
					},
					"logiqa": {
						"acc,none": 0.2565284178187404,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825177,
						"acc_stderr,none": 0.01712944332788756,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3132744623273038,
						"acc_stderr,none": 0.05630047358730123,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036624,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2565789473684211,
						"acc_stderr,none": 0.0355418036802569,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39245283018867927,
						"acc_stderr,none": 0.030052580579557838,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.038760854559127644,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958548,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.04533838195929774,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939098,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3702127659574468,
						"acc_stderr,none": 0.03156564682236784,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24338624338624337,
						"acc_stderr,none": 0.022101128787415422,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.040061680838488774,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3870967741935484,
						"acc_stderr,none": 0.027709359675032495,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.0309037969521145,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3696969696969697,
						"acc_stderr,none": 0.03769430314512567,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.37373737373737376,
						"acc_stderr,none": 0.03446897738659333,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.46113989637305697,
						"acc_stderr,none": 0.03597524411734578,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31794871794871793,
						"acc_stderr,none": 0.02361088430892786,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25555555555555554,
						"acc_stderr,none": 0.026593939101844065,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.028657491285071977,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.03658603262763743,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3137614678899083,
						"acc_stderr,none": 0.019894723341469123,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258526,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4068627450980392,
						"acc_stderr,none": 0.03447891136353382,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3459915611814346,
						"acc_stderr,none": 0.03096481058878671,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.36771300448430494,
						"acc_stderr,none": 0.03236198350928275,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.29373007438894794,
						"acc_stderr,none": 0.048179276673558105,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2892561983471074,
						"acc_stderr,none": 0.04139112727635463,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.04524596007030049,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3067484662576687,
						"acc_stderr,none": 0.036230899157241474,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.043270409325787296,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458933,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3803418803418803,
						"acc_stderr,none": 0.031804252043840985,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.39846743295019155,
						"acc_stderr,none": 0.01750743860277742,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.024105712607754307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574898,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3104575163398693,
						"acc_stderr,none": 0.026493033225145894,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749598,
						"acc_stderr,none": 0.048761595196255045,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.35691318327974275,
						"acc_stderr,none": 0.027210420375934023,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02622964917882116,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2375886524822695,
						"acc_stderr,none": 0.025389512552729906,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2633637548891786,
						"acc_stderr,none": 0.011249506403605284,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.029097209568411945,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2875816993464052,
						"acc_stderr,none": 0.018311653053648222,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.04607582090719976,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2571428571428571,
						"acc_stderr,none": 0.02797982353874455,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.322391940201495,
						"acc_stderr,none": 0.05185942323127035,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.42786069651741293,
						"acc_stderr,none": 0.03498541988407795,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2860767522993974,
						"acc_stderr,none": 0.061592210035896496,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.40350877192982454,
						"acc_stderr,none": 0.03762738699917056,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3464085583290881,
						"acc_stderr,none": 0.004803131292236268,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3554719283970708,
						"acc_stderr,none": 0.004827527158278539,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.02186830575426217,
						"alias": " - mrpc",
						"f1,none": 0.8328173374613003,
						"f1_stderr,none": 0.015922798423725182
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.06703601108033241,
						"exact_match_stderr,remove_whitespace": 0.004162872230666932
					},
					"openbookqa": {
						"acc,none": 0.29,
						"acc_norm,none": 0.402,
						"acc_norm_stderr,none": 0.021948929609938612,
						"acc_stderr,none": 0.02031317923174518,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.432,
						"acc_stderr,none": 0.011079231683079109,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3735,
						"acc_stderr,none": 0.010819306988058641,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.011000477501118884,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.011127079848413744,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5115,
						"acc_stderr,none": 0.011180177690296078,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.01117991481396971,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47664285714285715,
						"acc_stderr,none": 0.05453530538901475,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7573449401523396,
						"acc_norm,none": 0.766050054406964,
						"acc_norm_stderr,none": 0.009877236895137446,
						"acc_stderr,none": 0.010002002569708698,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7264457658583788,
						"acc_norm,none": 0.6096955483688431,
						"acc_norm_stderr,none": 0.009863339012311572,
						"acc_stderr,none": 0.1611300511276991,
						"alias": "pythia",
						"bits_per_byte,none": 0.6347183184916613,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5526345735618807,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3591197609191825,
						"perplexity_stderr,none": 0.06591618426752378,
						"word_perplexity,none": 10.513162984211142,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4966135822807981,
						"acc_stderr,none": 0.006765255380909213,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6192678703932724,
						"acc_stderr,none": 0.0024149188327367076,
						"alias": " - qqp",
						"f1,none": 0.6549351027819499,
						"f1_stderr,none": 0.002616454010465506
					},
					"record": {
						"alias": "record",
						"em,none": 0.2774,
						"em_stderr,none": 0.004477379668162045,
						"f1,none": 0.287568571428571,
						"f1_stderr,none": 0.004486306897998369
					},
					"rte": {
						"acc,none": 0.6462093862815884,
						"acc_stderr,none": 0.028780957835424684,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.95,
						"acc_norm,none": 0.932,
						"acc_norm_stderr,none": 0.007964887911291603,
						"acc_stderr,none": 0.006895472974897891,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9036697247706422,
						"acc_stderr,none": 0.009997172579825119,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.31727303757631314,
						"acc_stderr,none": 0.001436576041308098,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.6589032057097075,
						"bleu_diff_stderr,none": 0.8875152415325809,
						"bleu_max,none": 27.253690883024817,
						"bleu_max_stderr,none": 0.8195684347928635,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777305,
						"rouge1_diff,none": -10.897300394194946,
						"rouge1_diff_stderr,none": 0.9616333403253924,
						"rouge1_max,none": 51.55579931889928,
						"rouge1_max_stderr,none": 0.903962328064303,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -12.62947792141572,
						"rouge2_diff_stderr,none": 1.1554150122828406,
						"rouge2_max,none": 35.49189322460967,
						"rouge2_max_stderr,none": 1.0560692284703668,
						"rougeL_acc,none": 0.28886168910648713,
						"rougeL_acc_stderr,none": 0.01586634640138431,
						"rougeL_diff,none": -11.158520185214215,
						"rougeL_diff_stderr,none": 0.9763851322080774,
						"rougeL_max,none": 48.717280984399075,
						"rougeL_max_stderr,none": 0.9256556806666052
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3084455324357405,
						"bleu_acc_stderr,none": 0.01616803938315687,
						"bleu_diff,none": -7.6589032057097075,
						"bleu_diff_stderr,none": 0.8875152415325809,
						"bleu_max,none": 27.253690883024817,
						"bleu_max_stderr,none": 0.8195684347928635,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777305,
						"rouge1_diff,none": -10.897300394194946,
						"rouge1_diff_stderr,none": 0.9616333403253924,
						"rouge1_max,none": 51.55579931889928,
						"rouge1_max_stderr,none": 0.903962328064303,
						"rouge2_acc,none": 0.25458996328029376,
						"rouge2_acc_stderr,none": 0.015250117079156494,
						"rouge2_diff,none": -12.62947792141572,
						"rouge2_diff_stderr,none": 1.1554150122828406,
						"rouge2_max,none": 35.49189322460967,
						"rouge2_max_stderr,none": 1.0560692284703668,
						"rougeL_acc,none": 0.28886168910648713,
						"rougeL_acc_stderr,none": 0.01586634640138431,
						"rougeL_diff,none": -11.158520185214215,
						"rougeL_diff_stderr,none": 0.9763851322080774,
						"rougeL_max,none": 48.717280984399075,
						"rougeL_max_stderr,none": 0.9256556806666052
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24724602203182375,
						"acc_stderr,none": 0.015102404797359652,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38730005312080257,
						"acc_stderr,none": 0.013854015296505396,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6347183184916613,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5526345735618807,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.513162984211142,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6708760852407262,
						"acc_stderr,none": 0.013206387089091477,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.46153846153846156,
						"acc_stderr,none": 0.04912048887947827,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6181818181818182,
						"acc_stderr,none": 0.06845837772697577,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.022357273881016403,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.02014357284729079,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747605,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.021706550824518184,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988964,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.0206670329874661,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4398661311914324,
						"acc_stderr,none": 0.05287857648054458,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.00947142305417714,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169668,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.493574297188755,
						"acc_stderr,none": 0.010021245217159394,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38313253012048193,
						"acc_stderr,none": 0.009744464994287525,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5429718875502008,
						"acc_stderr,none": 0.009984991084561272,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5072289156626506,
						"acc_stderr,none": 0.010021025361119637,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5036144578313253,
						"acc_stderr,none": 0.010021811000966344,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702083,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673286,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840166,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45943775100401607,
						"acc_stderr,none": 0.0099890398747869,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40963855421686746,
						"acc_stderr,none": 0.009857049962123554,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.009881215932115986,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.35180722891566263,
						"acc_stderr,none": 0.009571764897113621,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6276998977197521,
						"acc_stderr,none": 0.05219078517475863,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751382,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7749834546657842,
						"acc_stderr,none": 0.01074644865596448,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7074784910655195,
						"acc_stderr,none": 0.011707038572975033,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.01277240869797916,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252869,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6611515552614163,
						"acc_stderr,none": 0.012180490758739039,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5367306419589676,
						"acc_stderr,none": 0.012832359240206969,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6757114493712773,
						"acc_stderr,none": 0.012046419229995328,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279766,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.012612688318767067,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6346790205162144,
						"acc_stderr,none": 0.012391557728373989,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.811643065857496,
						"acc_stderr,none": 0.04785618560441878,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8675268817204301,
						"acc_stderr,none": 0.007032136436579815,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783091,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7476538060479666,
						"acc_stderr,none": 0.01403349677309752,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.025771203207084706,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.02672687475429402,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8035714285714286,
						"acc_stderr,none": 0.01771456857704913,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk1-0_8_pth"
	},
	"./rwkv-x-dev/chunk2-0_8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6065388951521984,
						"acc_norm,none": 0.5679255918827508,
						"acc_norm_stderr,none": 0.0845047606529486,
						"acc_stderr,none": 0.11286787338019778,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.017490820609317875,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8153134328358209,
						"acc_stderr,none": 0.17618433301299535,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2701605940252116,
						"acc_norm,none": 0.2701605940252116,
						"acc_norm_stderr,none": 0.04074731236557746,
						"acc_stderr,none": 0.04074731236557746,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.49283099987150214,
						"acc_stderr,none": 0.09250324653551364,
						"alias": "glue",
						"f1,none": 0.6236455334464017,
						"f1_stderr,none": 0.0003739521447741664,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7096836794100524,
						"acc_stderr,none": 0.017660556184646136,
						"alias": "lambada",
						"perplexity,none": 3.8761390885885967,
						"perplexity_stderr,none": 0.2531723197933758
					},
					"lambada_multilingual": {
						"acc,none": 0.5295167863380555,
						"acc_stderr,none": 0.08305205160948256,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.15195513091255,
						"perplexity_stderr,none": 8.584147601677085
					},
					"mmlu": {
						"acc,none": 0.313416892180601,
						"acc_stderr,none": 0.05561374572019982,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2933049946865037,
						"acc_stderr,none": 0.046598589095393356,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.36240746700997745,
						"acc_stderr,none": 0.04605945691360844,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3305167370815729,
						"acc_stderr,none": 0.05257695502284729,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.27846495401205196,
						"acc_stderr,none": 0.058635662473005674,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48292857142857143,
						"acc_stderr,none": 0.04567080200579658,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7230631998927546,
						"acc_norm,none": 0.5737405397100933,
						"acc_norm_stderr,none": 0.009494046945832852,
						"acc_stderr,none": 0.1630645110355812,
						"alias": "pythia",
						"bits_per_byte,none": 0.6349396649788684,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5528728058731467,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3983808787680854,
						"perplexity_stderr,none": 0.06657049172904408,
						"word_perplexity,none": 10.521791904248214,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3206355260973532,
						"acc_stderr,none": 0.0015131841571734118,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.016305988648920647,
						"bleu_diff,none": -7.209796886800406,
						"bleu_diff_stderr,none": 0.8790521367278639,
						"bleu_max,none": 27.158121151053578,
						"bleu_max_stderr,none": 0.8254464911854278,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842905,
						"rouge1_diff,none": -10.283164451195915,
						"rouge1_diff_stderr,none": 0.9645947055419382,
						"rouge1_max,none": 51.52073813484144,
						"rouge1_max_stderr,none": 0.910396656024746,
						"rouge2_acc,none": 0.2484700122399021,
						"rouge2_acc_stderr,none": 0.015127427096520669,
						"rouge2_diff,none": -12.130635058800555,
						"rouge2_diff_stderr,none": 1.1548348145659848,
						"rouge2_max,none": 35.24780001673698,
						"rouge2_max_stderr,none": 1.0657777110440778,
						"rougeL_acc,none": 0.2729498164014688,
						"rougeL_acc_stderr,none": 0.015594753632006545,
						"rougeL_diff,none": -10.497772375526804,
						"rougeL_diff_stderr,none": 0.9805790084032298,
						"rougeL_max,none": 48.78943756476912,
						"rougeL_max_stderr,none": 0.9294453641265302
					},
					"xcopa": {
						"acc,none": 0.6174545454545454,
						"acc_stderr,none": 0.06920111980541176,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4394912985274431,
						"acc_stderr,none": 0.05192179084294283,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6276397328680585,
						"acc_stderr,none": 0.05381832131804611,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03695317788092073,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6065388951521984,
						"acc_norm,none": 0.5679255918827508,
						"acc_norm_stderr,none": 0.0845047606529486,
						"acc_stderr,none": 0.11286787338019778,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.017490820609317875,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.379,
						"acc_stderr,none": 0.01534909100222535,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.015120172605483694,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3466666666666667,
						"acc_stderr,none": 0.01374402255057195,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3677474402730375,
						"acc_norm,none": 0.38993174061433444,
						"acc_norm_stderr,none": 0.01425295984889289,
						"acc_stderr,none": 0.014090995618168465,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7243265993265994,
						"acc_norm,none": 0.6557239057239057,
						"acc_norm_stderr,none": 0.00974949532159081,
						"acc_stderr,none": 0.009169229476542563,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8153134328358209,
						"acc_stderr,none": 0.17618433301299535,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403647,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.01202162715773199,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345707,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660013,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.015752210388771844,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786564,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565578,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697081,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897878,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337066,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752503,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474906,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106325,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.672,
						"acc_stderr,none": 0.01485384248727033,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.729,
						"acc_stderr,none": 0.014062601350986186,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.013663187134877646,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406728,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333315,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306496,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.258,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340982,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264348,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.0155372264386346,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881424,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696837,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.0077997330618320366,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996692,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316405,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.386,
						"acc_stderr,none": 0.015402637476784366,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.0155944601441406,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592076,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653862,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695796,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804473,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697048,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578128,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.513,
						"acc_stderr,none": 0.01581395210189663,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.0070246242138171594,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029898,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445512,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.01574000469338385,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340985,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246439,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.01562562511262067,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408021,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381786,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786555,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706806,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074789,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178314,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318237,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.015372453034968526,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.297,
						"acc_stderr,none": 0.014456832294801101,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2701605940252116,
						"acc_norm,none": 0.2701605940252116,
						"acc_norm_stderr,none": 0.04074731236557746,
						"acc_stderr,none": 0.04074731236557746,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.033603007963315286,
						"acc_stderr,none": 0.033603007963315286,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.03695556038536326,
						"acc_stderr,none": 0.03695556038536326,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2679425837320574,
						"acc_norm,none": 0.2679425837320574,
						"acc_norm_stderr,none": 0.030708724295561353,
						"acc_stderr,none": 0.030708724295561353,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.26717557251908397,
						"acc_norm,none": 0.26717557251908397,
						"acc_norm_stderr,none": 0.038808483010823944,
						"acc_stderr,none": 0.038808483010823944,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.038618823893117264,
						"acc_stderr,none": 0.038618823893117264,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566668,
						"acc_stderr,none": 0.04317273776566668,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.28173374613003094,
						"acc_norm,none": 0.28173374613003094,
						"acc_norm_stderr,none": 0.02506883953426914,
						"acc_stderr,none": 0.02506883953426914,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083292,
						"acc_stderr,none": 0.03132179803083292,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529027,
						"acc_stderr,none": 0.04651841326529027,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604902,
						"acc_stderr,none": 0.04176466758604902,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2564102564102564,
						"acc_norm,none": 0.2564102564102564,
						"acc_norm_stderr,none": 0.026475851706699707,
						"acc_stderr,none": 0.026475851706699707,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832904,
						"acc_stderr,none": 0.031321798030832904,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2893081761006289,
						"acc_norm,none": 0.2893081761006289,
						"acc_norm_stderr,none": 0.03607384789794789,
						"acc_stderr,none": 0.03607384789794789,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2822085889570552,
						"acc_norm,none": 0.2822085889570552,
						"acc_norm_stderr,none": 0.03536117886664742,
						"acc_stderr,none": 0.03536117886664742,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.02896848136826004,
						"acc_stderr,none": 0.02896848136826004,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.26262626262626265,
						"acc_norm,none": 0.26262626262626265,
						"acc_norm_stderr,none": 0.031353050095330855,
						"acc_stderr,none": 0.031353050095330855,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3277310924369748,
						"acc_norm,none": 0.3277310924369748,
						"acc_norm_stderr,none": 0.03048991141767323,
						"acc_stderr,none": 0.03048991141767323,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23043478260869565,
						"acc_norm,none": 0.23043478260869565,
						"acc_norm_stderr,none": 0.027827807522276156,
						"acc_stderr,none": 0.027827807522276156,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614867,
						"acc_stderr,none": 0.03712537833614867,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623558,
						"acc_stderr,none": 0.03706860462623558,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627456,
						"acc_stderr,none": 0.03366618544627456,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2627118644067797,
						"acc_norm,none": 0.2627118644067797,
						"acc_norm_stderr,none": 0.04068792432070351,
						"acc_stderr,none": 0.04068792432070351,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653377,
						"acc_stderr,none": 0.033346454086653377,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900237,
						"acc_stderr,none": 0.03675137438900237,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.038095238095238106,
						"acc_stderr,none": 0.038095238095238106,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.032314709966177586,
						"acc_stderr,none": 0.032314709966177586,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26034063260340634,
						"acc_norm,none": 0.26034063260340634,
						"acc_norm_stderr,none": 0.021671797319809193,
						"acc_stderr,none": 0.021671797319809193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.03164457376920024,
						"acc_stderr,none": 0.03164457376920024,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208567,
						"acc_stderr,none": 0.04119323030208567,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.030927395843275775,
						"acc_stderr,none": 0.030927395843275775,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03387720998298804,
						"acc_stderr,none": 0.03387720998298804,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.03312783200356568,
						"acc_stderr,none": 0.03312783200356568,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135303,
						"acc_stderr,none": 0.03565998174135303,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.03382281937517294,
						"acc_stderr,none": 0.03382281937517294,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.029737751726596828,
						"acc_stderr,none": 0.029737751726596828,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.03020039007523146,
						"acc_stderr,none": 0.03020039007523146,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.28735632183908044,
						"acc_norm,none": 0.28735632183908044,
						"acc_norm_stderr,none": 0.03440515707228721,
						"acc_stderr,none": 0.03440515707228721,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2831858407079646,
						"acc_norm,none": 0.2831858407079646,
						"acc_norm_stderr,none": 0.03003639424509229,
						"acc_stderr,none": 0.03003639424509229,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268048,
						"acc_stderr,none": 0.03567969772268048,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.49283099987150214,
						"acc_stderr,none": 0.09250324653551364,
						"alias": "glue",
						"f1,none": 0.6236455334464017,
						"f1_stderr,none": 0.0003739521447741664,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5204142601075483,
						"acc_norm,none": 0.7064329814777933,
						"acc_norm_stderr,none": 0.00454465197604009,
						"acc_stderr,none": 0.004985620773683432,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7096836794100524,
						"acc_stderr,none": 0.017660556184646136,
						"alias": "lambada",
						"perplexity,none": 3.8761390885885967,
						"perplexity_stderr,none": 0.2531723197933758
					},
					"lambada_multilingual": {
						"acc,none": 0.5295167863380555,
						"acc_stderr,none": 0.08305205160948256,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.15195513091255,
						"perplexity_stderr,none": 8.584147601677085
					},
					"lambada_openai": {
						"acc,none": 0.7430622938094315,
						"acc_stderr,none": 0.006087494839873366,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3983808787680854,
						"perplexity_stderr,none": 0.06657049172904408
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4133514457597516,
						"acc_stderr,none": 0.006860579569392179,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.883885303238124,
						"perplexity_stderr,none": 2.0258671086344098
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7426741703861828,
						"acc_stderr,none": 0.006090499663132531,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3961327754562762,
						"perplexity_stderr,none": 0.06649862915556323
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45158160294973804,
						"acc_stderr,none": 0.006933239470474418,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.870239215981673,
						"perplexity_stderr,none": 1.4505671105282827
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.53929749660392,
						"acc_stderr,none": 0.00694442930426461,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.413461649321327,
						"perplexity_stderr,none": 0.8414066297939647
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.500679215990685,
						"acc_stderr,none": 0.006965971229899205,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.196056710565347,
						"perplexity_stderr,none": 1.2238221727862812
					},
					"lambada_standard": {
						"acc,none": 0.676693188433922,
						"acc_stderr,none": 0.006516515049707134,
						"alias": " - lambada_standard",
						"perplexity,none": 4.356034778997471,
						"perplexity_stderr,none": 0.09279288216288628
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.017602909186822453,
						"acc_stderr,none": 0.016742766935101426,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.313416892180601,
						"acc_stderr,none": 0.05561374572019982,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3223684210526316,
						"acc_stderr,none": 0.038035102483515854,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4037735849056604,
						"acc_stderr,none": 0.03019761160019795,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.36416184971098264,
						"acc_stderr,none": 0.03669072477416908,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3148936170212766,
						"acc_stderr,none": 0.030363582197238167,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.04142439719489362,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.037800192304380135,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23015873015873015,
						"acc_stderr,none": 0.02167921966369314,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38064516129032255,
						"acc_stderr,none": 0.027621717832907042,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.03090379695211447,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3696969696969697,
						"acc_stderr,none": 0.03769430314512568,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3939393939393939,
						"acc_stderr,none": 0.03481285338232963,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45077720207253885,
						"acc_stderr,none": 0.03590910952235523,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.31794871794871793,
						"acc_stderr,none": 0.023610884308927865,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.02578787422095933,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.029079374539480007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3321100917431193,
						"acc_stderr,none": 0.020192682985423337,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18981481481481483,
						"acc_stderr,none": 0.026744714834691947,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.03460228327239172,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3206751054852321,
						"acc_stderr,none": 0.030381931949990407,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.35874439461883406,
						"acc_stderr,none": 0.032190792004199956,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.0426073515764456,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2933049946865037,
						"acc_stderr,none": 0.046598589095393356,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.30578512396694213,
						"acc_stderr,none": 0.04205953933884123,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.0452459600703005,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.31901840490797545,
						"acc_stderr,none": 0.03661997551073836,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4368932038834951,
						"acc_stderr,none": 0.04911147107365776,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3974358974358974,
						"acc_stderr,none": 0.032059534537892925,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.39846743295019155,
						"acc_stderr,none": 0.017507438602777408,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.024685316867257792,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.31699346405228757,
						"acc_stderr,none": 0.02664327847450875,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.36240746700997745,
						"acc_stderr,none": 0.04605945691360844,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3408360128617363,
						"acc_stderr,none": 0.026920841260776155,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.33641975308641975,
						"acc_stderr,none": 0.026289734945952926,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24468085106382978,
						"acc_stderr,none": 0.025645553622266722,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25945241199478486,
						"acc_stderr,none": 0.011195262076350326,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3713235294117647,
						"acc_stderr,none": 0.02934980313976587,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3022875816993464,
						"acc_stderr,none": 0.018579232711113877,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.046534298079135075,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24081632653061225,
						"acc_stderr,none": 0.02737294220178816,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3305167370815729,
						"acc_stderr,none": 0.05257695502284729,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4079601990049751,
						"acc_stderr,none": 0.034751163651940926,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.27846495401205196,
						"acc_stderr,none": 0.058635662473005674,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683229,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.391812865497076,
						"acc_stderr,none": 0.037439798259264,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.33560876209882834,
						"acc_stderr,none": 0.004766565516281546,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3363506916192026,
						"acc_stderr,none": 0.004765040953125705,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.02186830575426217,
						"alias": " - mrpc",
						"f1,none": 0.8328173374613003,
						"f1_stderr,none": 0.015909841965899646
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.07396121883656509,
						"exact_match_stderr,remove_whitespace": 0.004356354125278422
					},
					"openbookqa": {
						"acc,none": 0.286,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.022000910893877186,
						"acc_stderr,none": 0.020229346329177514,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.448,
						"acc_stderr,none": 0.011122493197456274,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.010883323176386978,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.423,
						"acc_stderr,none": 0.011049730687855397,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078666,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884888,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48292857142857143,
						"acc_stderr,none": 0.04567080200579658,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7584330794341676,
						"acc_norm,none": 0.7665941240478781,
						"acc_norm_stderr,none": 0.009869247889520996,
						"acc_stderr,none": 0.009986718001804482,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7230631998927546,
						"acc_norm,none": 0.5737405397100933,
						"acc_norm_stderr,none": 0.009494046945832852,
						"acc_stderr,none": 0.1630645110355812,
						"alias": "pythia",
						"bits_per_byte,none": 0.6349396649788684,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5528728058731467,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3983808787680854,
						"perplexity_stderr,none": 0.06657049172904408,
						"word_perplexity,none": 10.521791904248214,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49807797913234486,
						"acc_stderr,none": 0.0067653605665169885,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5565421716547119,
						"acc_stderr,none": 0.002470749422055446,
						"alias": " - qqp",
						"f1,none": 0.6218707160181377,
						"f1_stderr,none": 0.002618533405093682
					},
					"record": {
						"alias": "record",
						"em,none": 0.2789,
						"em_stderr,none": 0.00448480661222245,
						"f1,none": 0.28904190476190433,
						"f1_stderr,none": 0.0044932346272898325
					},
					"rte": {
						"acc,none": 0.6534296028880866,
						"acc_stderr,none": 0.028644456994557532,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.942,
						"acc_norm,none": 0.911,
						"acc_norm_stderr,none": 0.00900889339265154,
						"acc_stderr,none": 0.007395315455792949,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8738532110091743,
						"acc_stderr,none": 0.01124988973632035,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3206355260973532,
						"acc_stderr,none": 0.0015131841571734118,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.016305988648920647,
						"bleu_diff,none": -7.209796886800406,
						"bleu_diff_stderr,none": 0.8790521367278639,
						"bleu_max,none": 27.158121151053578,
						"bleu_max_stderr,none": 0.8254464911854278,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842905,
						"rouge1_diff,none": -10.283164451195915,
						"rouge1_diff_stderr,none": 0.9645947055419382,
						"rouge1_max,none": 51.52073813484144,
						"rouge1_max_stderr,none": 0.910396656024746,
						"rouge2_acc,none": 0.2484700122399021,
						"rouge2_acc_stderr,none": 0.015127427096520669,
						"rouge2_diff,none": -12.130635058800555,
						"rouge2_diff_stderr,none": 1.1548348145659848,
						"rouge2_max,none": 35.24780001673698,
						"rouge2_max_stderr,none": 1.0657777110440778,
						"rougeL_acc,none": 0.2729498164014688,
						"rougeL_acc_stderr,none": 0.015594753632006545,
						"rougeL_diff,none": -10.497772375526804,
						"rougeL_diff_stderr,none": 0.9805790084032298,
						"rougeL_max,none": 48.78943756476912,
						"rougeL_max_stderr,none": 0.9294453641265302
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3182374541003672,
						"bleu_acc_stderr,none": 0.016305988648920647,
						"bleu_diff,none": -7.209796886800406,
						"bleu_diff_stderr,none": 0.8790521367278639,
						"bleu_max,none": 27.158121151053578,
						"bleu_max_stderr,none": 0.8254464911854278,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842905,
						"rouge1_diff,none": -10.283164451195915,
						"rouge1_diff_stderr,none": 0.9645947055419382,
						"rouge1_max,none": 51.52073813484144,
						"rouge1_max_stderr,none": 0.910396656024746,
						"rouge2_acc,none": 0.2484700122399021,
						"rouge2_acc_stderr,none": 0.015127427096520669,
						"rouge2_diff,none": -12.130635058800555,
						"rouge2_diff_stderr,none": 1.1548348145659848,
						"rouge2_max,none": 35.24780001673698,
						"rouge2_max_stderr,none": 1.0657777110440778,
						"rougeL_acc,none": 0.2729498164014688,
						"rougeL_acc_stderr,none": 0.015594753632006545,
						"rougeL_diff,none": -10.497772375526804,
						"rougeL_diff_stderr,none": 0.9805790084032298,
						"rougeL_max,none": 48.78943756476912,
						"rougeL_max_stderr,none": 0.9294453641265302
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2484700122399021,
						"acc_stderr,none": 0.015127427096520677,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39280103995480425,
						"acc_stderr,none": 0.013865852640302713,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6349396649788684,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5528728058731467,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.521791904248214,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.013148883320923156,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.46153846153846156,
						"acc_stderr,none": 0.04912048887947828,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6174545454545454,
						"acc_stderr,none": 0.06920111980541176,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689253,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231326,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177528,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.021683827539286115,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530068,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345396,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4394912985274431,
						"acc_stderr,none": 0.05192179084294283,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46224899598393576,
						"acc_stderr,none": 0.009993466360872783,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438304,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3891566265060241,
						"acc_stderr,none": 0.009772702993836013,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5429718875502008,
						"acc_stderr,none": 0.009984991084561273,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5040160642570282,
						"acc_stderr,none": 0.0100217495745559,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4967871485943775,
						"acc_stderr,none": 0.010021865961119555,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.009924844537285527,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4967871485943775,
						"acc_stderr,none": 0.010021865961119552,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38795180722891565,
						"acc_stderr,none": 0.009767181346586384,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164682,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4602409638554217,
						"acc_stderr,none": 0.009990337216722657,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40120481927710844,
						"acc_stderr,none": 0.009824484469158989,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42128514056224897,
						"acc_stderr,none": 0.009897099560589201,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3586345381526104,
						"acc_stderr,none": 0.00961316490090988,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6276397328680585,
						"acc_stderr,none": 0.05381832131804611,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.01263288721875138,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7776307081403044,
						"acc_stderr,none": 0.010701277694882511,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.01166782538830548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279769,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247462,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6618133686300464,
						"acc_stderr,none": 0.012174678796437402,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5360688285903376,
						"acc_stderr,none": 0.012833602406620013,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6750496360026472,
						"acc_stderr,none": 0.01205279844220021,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.012795688167385313,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389868,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6406353408338848,
						"acc_stderr,none": 0.012347659802101677,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03695317788092073,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8709677419354839,
						"acc_stderr,none": 0.006953958940141567,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7497393117831074,
						"acc_stderr,none": 0.013994864706473825,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7946768060836502,
						"acc_stderr,none": 0.024955347906737913,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.026602896148920776,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8015873015873016,
						"acc_stderr,none": 0.017781817702298933,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk2-0_8_pth"
	},
	"./rwkv-x-dev/chunk3-0_8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6076662908680946,
						"acc_norm,none": 0.5698985343855694,
						"acc_norm_stderr,none": 0.08423133492503232,
						"acc_stderr,none": 0.11020366986938332,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.350625,
						"acc_stderr,none": 0.014764931085373748,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8127611940298507,
						"acc_stderr,none": 0.16924630231958332,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2775859091694006,
						"acc_norm,none": 0.2775859091694006,
						"acc_norm_stderr,none": 0.042812042689160584,
						"acc_stderr,none": 0.042812042689160584,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.48493330157217723,
						"acc_stderr,none": 0.006750892491368954,
						"alias": "glue",
						"f1,none": 0.6168644290668991,
						"f1_stderr,none": 0.00039107663308903223,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7033766737822628,
						"acc_stderr,none": 0.020647121343565302,
						"alias": "lambada",
						"perplexity,none": 3.9103858907090427,
						"perplexity_stderr,none": 0.2749268035882682
					},
					"lambada_multilingual": {
						"acc,none": 0.5295555986803804,
						"acc_stderr,none": 0.08374803683649414,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.01570998207655,
						"perplexity_stderr,none": 8.566392642976403
					},
					"mmlu": {
						"acc,none": 0.3052983905426577,
						"acc_stderr,none": 0.05110098275158574,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.28522848034006376,
						"acc_stderr,none": 0.04191181835420451,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.346636626971355,
						"acc_stderr,none": 0.04582707595251334,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.31751706207344815,
						"acc_stderr,none": 0.04824864481799704,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2825880114176974,
						"acc_stderr,none": 0.05650962037931627,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47635714285714287,
						"acc_stderr,none": 0.04855559685412439,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7202829476801847,
						"acc_norm,none": 0.5757594693834329,
						"acc_norm_stderr,none": 0.00955828886891576,
						"acc_stderr,none": 0.15714848261755654,
						"alias": "pythia",
						"bits_per_byte,none": 0.6324349841476175,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5501791829444176,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3847790254129304,
						"perplexity_stderr,none": 0.06627913109695441,
						"word_perplexity,none": 10.424561977375479,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3183690764679285,
						"acc_stderr,none": 0.0014321059119448297,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.016289203374403385,
						"bleu_diff,none": -8.17069606219374,
						"bleu_diff_stderr,none": 0.8785108620231448,
						"bleu_max,none": 26.911206556659653,
						"bleu_max_stderr,none": 0.8149427805833467,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -11.330310153461436,
						"rouge1_diff_stderr,none": 0.9469628774968426,
						"rouge1_max,none": 51.282484572330624,
						"rouge1_max_stderr,none": 0.9001016445598825,
						"rouge2_acc,none": 0.2423500611995104,
						"rouge2_acc_stderr,none": 0.015000674373570342,
						"rouge2_diff,none": -13.275679344084011,
						"rouge2_diff_stderr,none": 1.1253889849919436,
						"rouge2_max,none": 35.1620309329529,
						"rouge2_max_stderr,none": 1.0527329372742549,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -11.622390697041077,
						"rougeL_diff_stderr,none": 0.9582816524865938,
						"rougeL_max,none": 48.548243906754756,
						"rougeL_max_stderr,none": 0.921471312069278
					},
					"xcopa": {
						"acc,none": 0.6172727272727273,
						"acc_stderr,none": 0.07175602919670175,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4380187416331995,
						"acc_stderr,none": 0.04889854175767313,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6284820407917694,
						"acc_stderr,none": 0.059961796842324694,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.812766913913239,
						"acc_stderr,none": 0.03731063132566582,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6076662908680946,
						"acc_norm,none": 0.5698985343855694,
						"acc_norm_stderr,none": 0.08423133492503232,
						"acc_stderr,none": 0.11020366986938332,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.350625,
						"acc_stderr,none": 0.014764931085373748,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.015139491543780532,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3475,
						"acc_stderr,none": 0.013751753243291854,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.37457337883959047,
						"acc_norm,none": 0.3924914675767918,
						"acc_norm_stderr,none": 0.01426963463567071,
						"acc_stderr,none": 0.014144193471893437,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7226430976430976,
						"acc_norm,none": 0.6574074074074074,
						"acc_norm_stderr,none": 0.009738105469984189,
						"acc_stderr,none": 0.009186490105111902,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8127611940298507,
						"acc_stderr,none": 0.16924630231958332,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942323,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565855,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938586,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766274,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085343,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.01578049505003016,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274538,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499338,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319423,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474926,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817149,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345707,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306495,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345698,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031314,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.013912208651021349,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323504,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499337,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306516,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.262,
						"acc_stderr,none": 0.013912208651021357,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525035,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.01241185135481633,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.015583544104177515,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.013772206565168537,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139959,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787728,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316405,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852431,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.387,
						"acc_stderr,none": 0.015410011955493937,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498334,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.015796218551302612,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240803,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.523,
						"acc_stderr,none": 0.015802554246726094,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766272,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340992,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474916,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118574,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378235,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996012,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246446,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.01081165537241605,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689096,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543138,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.015712507211864214,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151105,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792941,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.015543249100255544,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397339,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108658,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.01301973553930781,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919291,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244073,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178357,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.00453647215130649,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.363,
						"acc_stderr,none": 0.015213890444671278,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.014356395999905682,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2775859091694006,
						"acc_norm,none": 0.2775859091694006,
						"acc_norm_stderr,none": 0.042812042689160584,
						"acc_stderr,none": 0.042812042689160584,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620373,
						"acc_stderr,none": 0.03794062549620373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511784,
						"acc_stderr,none": 0.03524390844511784,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.27751196172248804,
						"acc_norm,none": 0.27751196172248804,
						"acc_norm_stderr,none": 0.03104734851984328,
						"acc_stderr,none": 0.03104734851984328,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.04010358942462202,
						"acc_stderr,none": 0.04010358942462202,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2867647058823529,
						"acc_norm,none": 0.2867647058823529,
						"acc_norm_stderr,none": 0.038923544178637824,
						"acc_stderr,none": 0.038923544178637824,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30959752321981426,
						"acc_norm,none": 0.30959752321981426,
						"acc_norm_stderr,none": 0.025764515105490108,
						"acc_stderr,none": 0.025764515105490108,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647553,
						"acc_stderr,none": 0.03182231867647553,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328524,
						"acc_stderr,none": 0.03383195081328524,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.04619693596622581,
						"acc_stderr,none": 0.04619693596622581,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763311,
						"acc_stderr,none": 0.04252016223763311,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2673992673992674,
						"acc_norm,none": 0.2673992673992674,
						"acc_norm_stderr,none": 0.026836713439088875,
						"acc_stderr,none": 0.026836713439088875,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.031822318676475544,
						"acc_stderr,none": 0.031822318676475544,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.037115139596751764,
						"acc_stderr,none": 0.037115139596751764,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2805755395683453,
						"acc_norm,none": 0.2805755395683453,
						"acc_norm_stderr,none": 0.03824529014900687,
						"acc_stderr,none": 0.03824529014900687,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.03673404171124563,
						"acc_stderr,none": 0.03673404171124563,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2883435582822086,
						"acc_norm,none": 0.2883435582822086,
						"acc_norm_stderr,none": 0.035590395316173425,
						"acc_stderr,none": 0.035590395316173425,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890494,
						"acc_stderr,none": 0.03361101403890494,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.02896848136826005,
						"acc_stderr,none": 0.02896848136826005,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.03154449888270286,
						"acc_stderr,none": 0.03154449888270286,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3487394957983193,
						"acc_norm,none": 0.3487394957983193,
						"acc_norm_stderr,none": 0.030956636328566545,
						"acc_stderr,none": 0.030956636328566545,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.0374985070917402,
						"acc_stderr,none": 0.0374985070917402,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.038970778815104114,
						"acc_stderr,none": 0.038970778815104114,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627455,
						"acc_stderr,none": 0.03366618544627455,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019615,
						"acc_stderr,none": 0.033341501981019615,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714262,
						"acc_stderr,none": 0.04025566684714262,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2865853658536585,
						"acc_norm,none": 0.2865853658536585,
						"acc_norm_stderr,none": 0.03541638332993505,
						"acc_stderr,none": 0.03541638332993505,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.038095238095238106,
						"acc_stderr,none": 0.038095238095238106,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002353,
						"acc_stderr,none": 0.03273943999002353,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.02205025435599507,
						"acc_stderr,none": 0.02205025435599507,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3130841121495327,
						"acc_norm,none": 0.3130841121495327,
						"acc_norm_stderr,none": 0.03177550735912672,
						"acc_stderr,none": 0.03177550735912672,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.040490154606224904,
						"acc_stderr,none": 0.040490154606224904,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.0407594465906925,
						"acc_stderr,none": 0.0407594465906925,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.28095238095238095,
						"acc_norm,none": 0.28095238095238095,
						"acc_norm_stderr,none": 0.03109009446934461,
						"acc_stderr,none": 0.03109009446934461,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.033477857599386325,
						"acc_stderr,none": 0.033477857599386325,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.033636354101848634,
						"acc_stderr,none": 0.033636354101848634,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495764,
						"acc_stderr,none": 0.040832215386495764,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.03664666337225256,
						"acc_stderr,none": 0.03664666337225256,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.04384295586918881,
						"acc_stderr,none": 0.04384295586918881,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.033822819375172945,
						"acc_stderr,none": 0.033822819375172945,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.022517032434592296,
						"acc_stderr,none": 0.022517032434592296,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3146551724137931,
						"acc_norm,none": 0.3146551724137931,
						"acc_norm_stderr,none": 0.03055385529035679,
						"acc_stderr,none": 0.03055385529035679,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3045977011494253,
						"acc_norm,none": 0.3045977011494253,
						"acc_norm_stderr,none": 0.03499115838809175,
						"acc_stderr,none": 0.03499115838809175,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.28761061946902655,
						"acc_norm,none": 0.28761061946902655,
						"acc_norm_stderr,none": 0.030176573035509163,
						"acc_stderr,none": 0.030176573035509163,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091707,
						"acc_stderr,none": 0.03588624800091707,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.034101678366769764,
						"acc_stderr,none": 0.034101678366769764,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03571428571428569,
						"acc_stderr,none": 0.03571428571428569,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.48493330157217723,
						"acc_stderr,none": 0.006750892491368954,
						"alias": "glue",
						"f1,none": 0.6168644290668991,
						"f1_stderr,none": 0.00039107663308903223,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.522903804023103,
						"acc_norm,none": 0.7072296355307708,
						"acc_norm_stderr,none": 0.004541039698729831,
						"acc_stderr,none": 0.004984543540932337,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7033766737822628,
						"acc_stderr,none": 0.020647121343565302,
						"alias": "lambada",
						"perplexity,none": 3.9103858907090427,
						"perplexity_stderr,none": 0.2749268035882682
					},
					"lambada_multilingual": {
						"acc,none": 0.5295555986803804,
						"acc_stderr,none": 0.08374803683649414,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.01570998207655,
						"perplexity_stderr,none": 8.566392642976403
					},
					"lambada_openai": {
						"acc,none": 0.7422860469629342,
						"acc_stderr,none": 0.006093498206249781,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3847790254129304,
						"perplexity_stderr,none": 0.06627913109695441
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41102270522026,
						"acc_stderr,none": 0.006854791601399812,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.94089987350654,
						"perplexity_stderr,none": 2.0192513327606534
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7426741703861828,
						"acc_stderr,none": 0.006090499663132531,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.385555141807493,
						"perplexity_stderr,none": 0.06638147957608145
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4492528624102465,
						"acc_stderr,none": 0.006930006207066422,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.395681250265326,
						"perplexity_stderr,none": 1.422991621423236
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5402678051620415,
						"acc_stderr,none": 0.0069433502956647445,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.258010335336703,
						"perplexity_stderr,none": 0.8315916769934701
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.504560450223171,
						"acc_stderr,none": 0.006965687898451475,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.098403309466686,
						"perplexity_stderr,none": 1.2177825070518606
					},
					"lambada_standard": {
						"acc,none": 0.6640791771783427,
						"acc_stderr,none": 0.00658022080375575,
						"alias": " - lambada_standard",
						"perplexity,none": 4.435163105017095,
						"perplexity_stderr,none": 0.09520652714891847
					},
					"logiqa": {
						"acc,none": 0.2488479262672811,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.017420694783393142,
						"acc_stderr,none": 0.016957985904525585,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3052983905426577,
						"acc_stderr,none": 0.05110098275158574,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39245283018867927,
						"acc_stderr,none": 0.03005258057955785,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3352601156069364,
						"acc_stderr,none": 0.03599586301247077,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.044405219061793295,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610337,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.038351539543994194,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.03695183311650232,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643898,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147127,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3967741935483871,
						"acc_stderr,none": 0.02783123160576794,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2512315270935961,
						"acc_stderr,none": 0.030516530732694436,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3151515151515151,
						"acc_stderr,none": 0.0362773057502241,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.35858585858585856,
						"acc_stderr,none": 0.03416903640391521,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.39896373056994816,
						"acc_stderr,none": 0.03533999094065695,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.30512820512820515,
						"acc_stderr,none": 0.023346335293325887,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.02671924078371218,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.028942004040998167,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3211009174311927,
						"acc_stderr,none": 0.020018149772733747,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1712962962962963,
						"acc_stderr,none": 0.02569534164382469,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.033086111132364364,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3037974683544304,
						"acc_stderr,none": 0.02993669638713861,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.0426073515764456,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.28522848034006376,
						"acc_stderr,none": 0.04191181835420451,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.30578512396694213,
						"acc_stderr,none": 0.042059539338841226,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.045879047413018105,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.32515337423312884,
						"acc_stderr,none": 0.036803503712864595,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4563106796116505,
						"acc_stderr,none": 0.04931801994220416,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3418803418803419,
						"acc_stderr,none": 0.03107502852650775,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3895274584929757,
						"acc_stderr,none": 0.0174380825562646,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3236994219653179,
						"acc_stderr,none": 0.02519018132760842,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2335195530726257,
						"acc_stderr,none": 0.014149575348976266,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.31699346405228757,
						"acc_stderr,none": 0.026643278474508755,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.346636626971355,
						"acc_stderr,none": 0.04582707595251334,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3536977491961415,
						"acc_stderr,none": 0.02715520810320088,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02622964917882117,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24113475177304963,
						"acc_stderr,none": 0.02551873104953777,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2522816166883963,
						"acc_stderr,none": 0.011092789056875229,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.31985294117647056,
						"acc_stderr,none": 0.02833295951403122,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2908496732026144,
						"acc_stderr,none": 0.018373116915903966,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.046534298079135075,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2530612244897959,
						"acc_stderr,none": 0.02783302387139968,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.31751706207344815,
						"acc_stderr,none": 0.04824864481799704,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.40298507462686567,
						"acc_stderr,none": 0.034683432951111266,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2825880114176974,
						"acc_stderr,none": 0.05650962037931627,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3132530120481928,
						"acc_stderr,none": 0.03610805018031023,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.03660298834049162,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3371370351502802,
						"acc_stderr,none": 0.004771908221905982,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.33899511798209925,
						"acc_stderr,none": 0.0047741955911452605,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7254901960784313,
						"acc_stderr,none": 0.022120630385010488,
						"alias": " - mrpc",
						"f1,none": 0.8287461773700305,
						"f1_stderr,none": 0.01592325530766116
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.07617728531855955,
						"exact_match_stderr,remove_whitespace": 0.004415842989116639
					},
					"openbookqa": {
						"acc,none": 0.286,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.02200091089387719,
						"acc_stderr,none": 0.020229346329177524,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.011102325468811016,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.368,
						"acc_stderr,none": 0.010786388324483072,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4265,
						"acc_stderr,none": 0.01106164793453104,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.011128198119942883,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630765,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5055,
						"acc_stderr,none": 0.011182459420867636,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4965,
						"acc_stderr,none": 0.011182862030875937,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47635714285714287,
						"acc_stderr,none": 0.04855559685412439,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7600652883569097,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.009779850767847246,
						"acc_stderr,none": 0.009963625892809545,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7202829476801847,
						"acc_norm,none": 0.5757594693834329,
						"acc_norm_stderr,none": 0.00955828886891576,
						"acc_stderr,none": 0.15714848261755654,
						"alias": "pythia",
						"bits_per_byte,none": 0.6324349841476175,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5501791829444176,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3847790254129304,
						"perplexity_stderr,none": 0.06627913109695441,
						"word_perplexity,none": 10.424561977375479,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49899322716456157,
						"acc_stderr,none": 0.006765396837036612,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.542072718278506,
						"acc_stderr,none": 0.002477881507779244,
						"alias": " - qqp",
						"f1,none": 0.6150295267404142,
						"f1_stderr,none": 0.002611606759288344
					},
					"record": {
						"alias": "record",
						"em,none": 0.2822,
						"em_stderr,none": 0.004500926775029807,
						"f1,none": 0.2926785716950893,
						"f1_stderr,none": 0.004509704095363895
					},
					"rte": {
						"acc,none": 0.6353790613718412,
						"acc_stderr,none": 0.028972282465132407,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.941,
						"acc_norm,none": 0.917,
						"acc_norm_stderr,none": 0.008728527206074789,
						"acc_stderr,none": 0.00745483565040673,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8922018348623854,
						"acc_stderr,none": 0.010508195955513574,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3183690764679285,
						"acc_stderr,none": 0.0014321059119448297,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.016289203374403385,
						"bleu_diff,none": -8.17069606219374,
						"bleu_diff_stderr,none": 0.8785108620231448,
						"bleu_max,none": 26.911206556659653,
						"bleu_max_stderr,none": 0.8149427805833467,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -11.330310153461436,
						"rouge1_diff_stderr,none": 0.9469628774968426,
						"rouge1_max,none": 51.282484572330624,
						"rouge1_max_stderr,none": 0.9001016445598825,
						"rouge2_acc,none": 0.2423500611995104,
						"rouge2_acc_stderr,none": 0.015000674373570342,
						"rouge2_diff,none": -13.275679344084011,
						"rouge2_diff_stderr,none": 1.1253889849919436,
						"rouge2_max,none": 35.1620309329529,
						"rouge2_max_stderr,none": 1.0527329372742549,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -11.622390697041077,
						"rougeL_diff_stderr,none": 0.9582816524865938,
						"rougeL_max,none": 48.548243906754756,
						"rougeL_max_stderr,none": 0.921471312069278
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.016289203374403385,
						"bleu_diff,none": -8.17069606219374,
						"bleu_diff_stderr,none": 0.8785108620231448,
						"bleu_max,none": 26.911206556659653,
						"bleu_max_stderr,none": 0.8149427805833467,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.01568092936402465,
						"rouge1_diff,none": -11.330310153461436,
						"rouge1_diff_stderr,none": 0.9469628774968426,
						"rouge1_max,none": 51.282484572330624,
						"rouge1_max_stderr,none": 0.9001016445598825,
						"rouge2_acc,none": 0.2423500611995104,
						"rouge2_acc_stderr,none": 0.015000674373570342,
						"rouge2_diff,none": -13.275679344084011,
						"rouge2_diff_stderr,none": 1.1253889849919436,
						"rouge2_max,none": 35.1620309329529,
						"rouge2_max_stderr,none": 1.0527329372742549,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -11.622390697041077,
						"rougeL_diff_stderr,none": 0.9582816524865938,
						"rougeL_max,none": 48.548243906754756,
						"rougeL_max_stderr,none": 0.921471312069278
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2484700122399021,
						"acc_stderr,none": 0.015127427096520702,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3882681406959549,
						"acc_stderr,none": 0.013827306917470866,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6324349841476175,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5501791829444176,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.424561977375479,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6842936069455406,
						"acc_stderr,none": 0.01306309474300081,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5352112676056338,
						"acc_stderr,none": 0.05961305784972239,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.46153846153846156,
						"acc_stderr,none": 0.04912048887947828,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6172727272727273,
						"acc_stderr,none": 0.07175602919670175,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920787,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177517,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.019486596801643382,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.0216371979857224,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209188,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.020667032987466104,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4380187416331995,
						"acc_stderr,none": 0.04889854175767313,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4642570281124498,
						"acc_stderr,none": 0.009996432468510364,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088544,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38353413654618473,
						"acc_stderr,none": 0.009746396613443776,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5397590361445783,
						"acc_stderr,none": 0.009990337216722659,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5020080321285141,
						"acc_stderr,none": 0.010021992045038411,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5040160642570282,
						"acc_stderr,none": 0.010021749574555901,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43694779116465865,
						"acc_stderr,none": 0.009942066394610857,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010006,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467446,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811676,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453007,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115698,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.009629594988040054,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6284820407917694,
						"acc_stderr,none": 0.059961796842324694,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5976174718729318,
						"acc_stderr,none": 0.012619516819528715,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7703507610853739,
						"acc_stderr,none": 0.010824012610568654,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7121111846459298,
						"acc_stderr,none": 0.01165192890685479,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332788,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927898,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.01215116443816391,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5400397088021178,
						"acc_stderr,none": 0.01282580237008399,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6783587028457975,
						"acc_stderr,none": 0.012020627225185137,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730203,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551176,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6399735274652548,
						"acc_stderr,none": 0.012352638981498531,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.812766913913239,
						"acc_stderr,none": 0.03731063132566582,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.006983463551504547,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353822,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944937,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.02672687475429403,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7976190476190477,
						"acc_stderr,none": 0.017914248052567798,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk3-0_8_pth"
	},
	"./rwkv-x-dev/chunk4-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6279594137542277,
						"acc_norm,none": 0.6189402480270575,
						"acc_norm_stderr,none": 0.09195883581314192,
						"acc_stderr,none": 0.10811713320643039,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3496875,
						"acc_stderr,none": 0.014821224195427491,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.816716417910448,
						"acc_stderr,none": 0.17299095815732607,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.28017613538249003,
						"acc_norm,none": 0.28017613538249003,
						"acc_norm_stderr,none": 0.04586439454801138,
						"acc_stderr,none": 0.04586439454801138,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5113582708828945,
						"acc_stderr,none": 0.1036636683531409,
						"alias": "glue",
						"f1,none": 0.6383019269107849,
						"f1_stderr,none": 0.0003179766579110715,
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.0008660003314516894
					},
					"lambada": {
						"acc,none": 0.7037647972055113,
						"acc_stderr,none": 0.019450623856608992,
						"alias": "lambada",
						"perplexity,none": 3.9456430000489986,
						"perplexity_stderr,none": 0.26754502765061006
					},
					"lambada_multilingual": {
						"acc,none": 0.5299049097613041,
						"acc_stderr,none": 0.08663238993399297,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.832662609803585,
						"perplexity_stderr,none": 8.592974755362796
					},
					"mmlu": {
						"acc,none": 0.2962540948582823,
						"acc_stderr,none": 0.0486422309898191,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.28119022316684383,
						"acc_stderr,none": 0.037336581125248576,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.33569359510782104,
						"acc_stderr,none": 0.0447965839682438,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3015924601884952,
						"acc_stderr,none": 0.04324646530474508,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2746590548683794,
						"acc_stderr,none": 0.05856213087396464,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47764285714285715,
						"acc_stderr,none": 0.0523955794519198,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7224499720971671,
						"acc_norm,none": 0.6232583000718513,
						"acc_norm_stderr,none": 0.010506116865743794,
						"acc_stderr,none": 0.16007281175301402,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337524429111209,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5515954420159002,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4353905376803837,
						"perplexity_stderr,none": 0.06777153398895426,
						"word_perplexity,none": 10.475592252333357,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3181315055752817,
						"acc_stderr,none": 0.0015563184053450274,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.1138104757292915,
						"bleu_diff_stderr,none": 0.8896674570142871,
						"bleu_max,none": 27.634044112680957,
						"bleu_max_stderr,none": 0.8152347414856879,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384308,
						"rouge1_diff,none": -9.13306929357122,
						"rouge1_diff_stderr,none": 0.9718099134947739,
						"rouge1_max,none": 52.703545231860005,
						"rouge1_max_stderr,none": 0.8758160603535571,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766368,
						"rouge2_diff,none": -11.172027287599581,
						"rouge2_diff_stderr,none": 1.1582012906683734,
						"rouge2_max,none": 36.570962366414335,
						"rouge2_max_stderr,none": 1.0423242358530302,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.543931809407418,
						"rougeL_diff_stderr,none": 0.9851215131831138,
						"rougeL_max,none": 50.03581883945255,
						"rougeL_max_stderr,none": 0.8992774655403986
					},
					"xcopa": {
						"acc,none": 0.6176363636363635,
						"acc_stderr,none": 0.07342809337816081,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43617135207496655,
						"acc_stderr,none": 0.04912194143307382,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6273389086095903,
						"acc_stderr,none": 0.060280339947664276,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8107439874129018,
						"acc_stderr,none": 0.03588827224009229,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6279594137542277,
						"acc_norm,none": 0.6189402480270575,
						"acc_norm_stderr,none": 0.09195883581314192,
						"acc_stderr,none": 0.10811713320643039,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3496875,
						"acc_stderr,none": 0.014821224195427491,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316405,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.015139491543780532,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295756,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3993174061433447,
						"acc_norm,none": 0.4249146757679181,
						"acc_norm_stderr,none": 0.014445698968520769,
						"acc_stderr,none": 0.014312094557946707,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7407407407407407,
						"acc_norm,none": 0.7146464646464646,
						"acc_norm_stderr,none": 0.009266280584997753,
						"acc_stderr,none": 0.008992251535805513,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.816716417910448,
						"acc_stderr,none": 0.17299095815732607,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942309,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346936,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719123,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662753,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804483,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.01293148186493804,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448825,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403636,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295441,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571407,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033852,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462125,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487904,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408042,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.679,
						"acc_stderr,none": 0.014770821817934649,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750638,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504401,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910642,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.251,
						"acc_stderr,none": 0.01371813351688892,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340983,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.01248626873437014,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.015499685165842594,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937586,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855755,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323485,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.659,
						"acc_stderr,none": 0.014998131348402702,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724425,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911077,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858922,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.01576069159013639,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024966,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.627,
						"acc_stderr,none": 0.01530049362292281,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653855,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524315,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595294,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706822,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578128,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333449,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.01165726777130443,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814895,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140921,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397342,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.0039698563903194095,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113471,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.437,
						"acc_stderr,none": 0.015693223928730373,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651521,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295436,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498318,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240803,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843986,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559568,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792944,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496114945,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.00493957481969846,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.381,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.014671272822977885,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.28017613538249003,
						"acc_norm,none": 0.28017613538249003,
						"acc_norm_stderr,none": 0.04586439454801138,
						"acc_stderr,none": 0.04586439454801138,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.033071627503231775,
						"acc_stderr,none": 0.033071627503231775,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653377,
						"acc_stderr,none": 0.033346454086653377,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.03714454174077367,
						"acc_stderr,none": 0.03714454174077367,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.291866028708134,
						"acc_norm,none": 0.291866028708134,
						"acc_norm_stderr,none": 0.03152229446041968,
						"acc_stderr,none": 0.03152229446041968,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467765,
						"acc_stderr,none": 0.03980066246467765,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3014705882352941,
						"acc_norm,none": 0.3014705882352941,
						"acc_norm_stderr,none": 0.03949552929827394,
						"acc_stderr,none": 0.03949552929827394,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.043628399335701014,
						"acc_stderr,none": 0.043628399335701014,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30030959752321984,
						"acc_norm,none": 0.30030959752321984,
						"acc_norm_stderr,none": 0.025545218898401934,
						"acc_stderr,none": 0.025545218898401934,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.031660096793998116,
						"acc_stderr,none": 0.031660096793998116,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.26256983240223464,
						"acc_norm,none": 0.26256983240223464,
						"acc_norm_stderr,none": 0.03298168673967123,
						"acc_stderr,none": 0.03298168673967123,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036423,
						"acc_stderr,none": 0.027985699387036423,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.03743138631255277,
						"acc_stderr,none": 0.03743138631255277,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449114,
						"acc_stderr,none": 0.04648144634449114,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123842,
						"acc_stderr,none": 0.04752784159123842,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809805,
						"acc_stderr,none": 0.039578354719809805,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.29304029304029305,
						"acc_norm,none": 0.29304029304029305,
						"acc_norm_stderr,none": 0.02759793255358406,
						"acc_stderr,none": 0.02759793255358406,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115072,
						"acc_stderr,none": 0.03198001660115072,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209194,
						"acc_stderr,none": 0.03377310252209194,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.037115139596751764,
						"acc_stderr,none": 0.037115139596751764,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3081761006289308,
						"acc_norm,none": 0.3081761006289308,
						"acc_norm_stderr,none": 0.036734041711245634,
						"acc_stderr,none": 0.036734041711245634,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.036429145782924055,
						"acc_stderr,none": 0.036429145782924055,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.033366051897610625,
						"acc_stderr,none": 0.033366051897610625,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713545,
						"acc_stderr,none": 0.03191178226713545,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.38235294117647056,
						"acc_norm,none": 0.38235294117647056,
						"acc_norm_stderr,none": 0.03156663099215416,
						"acc_stderr,none": 0.03156663099215416,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079127,
						"acc_stderr,none": 0.03344352850079127,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.26174496644295303,
						"acc_norm,none": 0.26174496644295303,
						"acc_norm_stderr,none": 0.03613362391075456,
						"acc_stderr,none": 0.03613362391075456,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.033071627503231754,
						"acc_stderr,none": 0.033071627503231754,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.040255666847142615,
						"acc_stderr,none": 0.040255666847142615,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.0349495901617754,
						"acc_stderr,none": 0.0349495901617754,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623559,
						"acc_stderr,none": 0.03706860462623559,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03932537680392871,
						"acc_stderr,none": 0.03932537680392871,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337436,
						"acc_stderr,none": 0.03407826167337436,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2798053527980535,
						"acc_norm,none": 0.2798053527980535,
						"acc_norm_stderr,none": 0.02216976172592782,
						"acc_stderr,none": 0.02216976172592782,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.32242990654205606,
						"acc_norm,none": 0.32242990654205606,
						"acc_norm_stderr,none": 0.03202616755131743,
						"acc_stderr,none": 0.03202616755131743,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.04183273258787623,
						"acc_stderr,none": 0.04183273258787623,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.030231990420749873,
						"acc_stderr,none": 0.030231990420749873,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.32222222222222224,
						"acc_norm,none": 0.32222222222222224,
						"acc_norm_stderr,none": 0.03492970288642683,
						"acc_stderr,none": 0.03492970288642683,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.03312783200356569,
						"acc_stderr,none": 0.03312783200356569,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363241,
						"acc_stderr,none": 0.041265147363241,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.03695183311650232,
						"acc_stderr,none": 0.03695183311650232,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.044729159560441434,
						"acc_stderr,none": 0.044729159560441434,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.033133343292217204,
						"acc_stderr,none": 0.033133343292217204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056274,
						"acc_stderr,none": 0.022593550801056274,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.31896551724137934,
						"acc_norm,none": 0.31896551724137934,
						"acc_norm_stderr,none": 0.03066552670940148,
						"acc_stderr,none": 0.03066552670940148,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3160919540229885,
						"acc_norm,none": 0.3160919540229885,
						"acc_norm_stderr,none": 0.035349438976908586,
						"acc_stderr,none": 0.035349438976908586,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.27876106194690264,
						"acc_norm,none": 0.27876106194690264,
						"acc_norm_stderr,none": 0.029892647352308926,
						"acc_stderr,none": 0.029892647352308926,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.034101678366769764,
						"acc_stderr,none": 0.034101678366769764,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.029427883570717236
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5113582708828945,
						"acc_stderr,none": 0.1036636683531409,
						"alias": "glue",
						"f1,none": 0.6383019269107849,
						"f1_stderr,none": 0.0003179766579110715,
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.0008660003314516894
					},
					"hellaswag": {
						"acc,none": 0.5238996215893248,
						"acc_norm,none": 0.7088229436367257,
						"acc_norm_stderr,none": 0.0045337646862119935,
						"acc_stderr,none": 0.004984077906216104,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7037647972055113,
						"acc_stderr,none": 0.019450623856608992,
						"alias": "lambada",
						"perplexity,none": 3.9456430000489986,
						"perplexity_stderr,none": 0.26754502765061006
					},
					"lambada_multilingual": {
						"acc,none": 0.5299049097613041,
						"acc_stderr,none": 0.08663238993399297,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.832662609803585,
						"perplexity_stderr,none": 8.592974755362796
					},
					"lambada_openai": {
						"acc,none": 0.7407335532699398,
						"acc_stderr,none": 0.006105429762071468,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4353905376803837,
						"perplexity_stderr,none": 0.06777153398895426
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41199301377838154,
						"acc_stderr,none": 0.006857222503405942,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.34057864106967,
						"perplexity_stderr,none": 1.991011415055654
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7403454298466913,
						"acc_stderr,none": 0.006108397042730502,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4368899106408732,
						"perplexity_stderr,none": 0.06779930717092493
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45294003493110807,
						"acc_stderr,none": 0.006935054751870183,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.261877370337473,
						"perplexity_stderr,none": 1.4207366636590297
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5418202988550359,
						"acc_stderr,none": 0.006941568775008248,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.23692611588119,
						"perplexity_stderr,none": 0.8301448997498383
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5024257713953038,
						"acc_stderr,none": 0.006965895675973339,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.887041011088712,
						"perplexity_stderr,none": 1.2111579153668939
					},
					"lambada_standard": {
						"acc,none": 0.6669901028527072,
						"acc_stderr,none": 0.00656599183276294,
						"alias": " - lambada_standard",
						"perplexity,none": 4.454101423316415,
						"perplexity_stderr,none": 0.09639130624674028
					},
					"logiqa": {
						"acc,none": 0.23655913978494625,
						"acc_norm,none": 0.2780337941628264,
						"acc_norm_stderr,none": 0.01757318777028271,
						"acc_stderr,none": 0.016668667667174192,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2962540948582823,
						"acc_stderr,none": 0.0486422309898191,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.035834961763610625,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3660377358490566,
						"acc_stderr,none": 0.029647813539365245,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2986111111111111,
						"acc_stderr,none": 0.03827052357950756,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.03496101481191181,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171453,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748142,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24867724867724866,
						"acc_stderr,none": 0.02226181769240016,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.04240799327574924,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3935483870967742,
						"acc_stderr,none": 0.027791878753132274,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2512315270935961,
						"acc_stderr,none": 0.030516530732694433,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.03713158067481913,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.30303030303030304,
						"acc_stderr,none": 0.032742879140268674,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.38341968911917096,
						"acc_stderr,none": 0.03508984236295341,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2743589743589744,
						"acc_stderr,none": 0.02262276576749322,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.026067159222275788,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.02894200404099817,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.29908256880733947,
						"acc_stderr,none": 0.019630417285415175,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.026491914727355157,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.032282103870378935,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29535864978902954,
						"acc_stderr,none": 0.029696338713422893,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.28119022316684383,
						"acc_stderr,none": 0.037336581125248576,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.04294340845212095,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374984,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.31901840490797545,
						"acc_stderr,none": 0.03661997551073836,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.39805825242718446,
						"acc_stderr,none": 0.04846748253977239,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.33760683760683763,
						"acc_stderr,none": 0.03098029699261856,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.38697318007662834,
						"acc_stderr,none": 0.017417138059440132,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2976878612716763,
						"acc_stderr,none": 0.024617055388676996,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2335195530726257,
						"acc_stderr,none": 0.014149575348976262,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2679738562091503,
						"acc_stderr,none": 0.025360603796242557,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.33569359510782104,
						"acc_stderr,none": 0.0447965839682438,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3536977491961415,
						"acc_stderr,none": 0.02715520810320087,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.29012345679012347,
						"acc_stderr,none": 0.025251173936495012,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2695035460992908,
						"acc_stderr,none": 0.026469036818590624,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25488917861799215,
						"acc_stderr,none": 0.011130509812662979,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.027678468642144696,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2908496732026144,
						"acc_stderr,none": 0.018373116915903966,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.046534298079135075,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24489795918367346,
						"acc_stderr,none": 0.02752963744017491,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3015924601884952,
						"acc_stderr,none": 0.04324646530474508,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.373134328358209,
						"acc_stderr,none": 0.034198326081760065,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2746590548683794,
						"acc_stderr,none": 0.05856213087396464,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.036965843170106004,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.34502923976608185,
						"acc_stderr,none": 0.03645981377388807,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3336729495669893,
						"acc_stderr,none": 0.004759717702118218,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3340113913751017,
						"acc_stderr,none": 0.004756803283728468,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7303921568627451,
						"acc_stderr,none": 0.02199617199531545,
						"alias": " - mrpc",
						"f1,none": 0.8307692307692308,
						"f1_stderr,none": 0.015929238496003625
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.05124653739612189,
						"exact_match_stderr,remove_whitespace": 0.0036704178575237773
					},
					"openbookqa": {
						"acc,none": 0.294,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982787,
						"acc_stderr,none": 0.020395095484936603,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4335,
						"acc_stderr,none": 0.011083785461207559,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3765,
						"acc_stderr,none": 0.01083663191658967,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4205,
						"acc_stderr,none": 0.01104087068182141,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.01112249319745629,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.01118132420626028,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5035,
						"acc_stderr,none": 0.011182862030875627,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47764285714285715,
						"acc_stderr,none": 0.0523955794519198,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7687704026115343,
						"acc_norm,none": 0.7829162132752993,
						"acc_norm_stderr,none": 0.00961870841575678,
						"acc_stderr,none": 0.009837063180625326,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7224499720971671,
						"acc_norm,none": 0.6232583000718513,
						"acc_norm_stderr,none": 0.010506116865743794,
						"acc_stderr,none": 0.16007281175301402,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337524429111209,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5515954420159002,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4353905376803837,
						"perplexity_stderr,none": 0.06777153398895426,
						"word_perplexity,none": 10.475592252333357,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49881017755811824,
						"acc_stderr,none": 0.0067653913964714745,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5877813504823152,
						"acc_stderr,none": 0.002448077822656642,
						"alias": " - qqp",
						"f1,none": 0.6366688467407892,
						"f1_stderr,none": 0.002617342238771032
					},
					"record": {
						"alias": "record",
						"em,none": 0.2832,
						"em_stderr,none": 0.004505752565401069,
						"f1,none": 0.2934652383506298,
						"f1_stderr,none": 0.004513613004242202
					},
					"rte": {
						"acc,none": 0.6534296028880866,
						"acc_stderr,none": 0.028644456994557525,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.95,
						"acc_norm,none": 0.926,
						"acc_norm_stderr,none": 0.008282064512704163,
						"acc_stderr,none": 0.0068954729748978965,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9162844036697247,
						"acc_stderr,none": 0.009384459346340936,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3181315055752817,
						"acc_stderr,none": 0.0015563184053450274,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.1138104757292915,
						"bleu_diff_stderr,none": 0.8896674570142871,
						"bleu_max,none": 27.634044112680957,
						"bleu_max_stderr,none": 0.8152347414856879,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384308,
						"rouge1_diff,none": -9.13306929357122,
						"rouge1_diff_stderr,none": 0.9718099134947739,
						"rouge1_max,none": 52.703545231860005,
						"rouge1_max_stderr,none": 0.8758160603535571,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766368,
						"rouge2_diff,none": -11.172027287599581,
						"rouge2_diff_stderr,none": 1.1582012906683734,
						"rouge2_max,none": 36.570962366414335,
						"rouge2_max_stderr,none": 1.0423242358530302,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.543931809407418,
						"rougeL_diff_stderr,none": 0.9851215131831138,
						"rougeL_max,none": 50.03581883945255,
						"rougeL_max_stderr,none": 0.8992774655403986
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.016272287957916916,
						"bleu_diff,none": -7.1138104757292915,
						"bleu_diff_stderr,none": 0.8896674570142871,
						"bleu_max,none": 27.634044112680957,
						"bleu_max_stderr,none": 0.8152347414856879,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384308,
						"rouge1_diff,none": -9.13306929357122,
						"rouge1_diff_stderr,none": 0.9718099134947739,
						"rouge1_max,none": 52.703545231860005,
						"rouge1_max_stderr,none": 0.8758160603535571,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766368,
						"rouge2_diff,none": -11.172027287599581,
						"rouge2_diff_stderr,none": 1.1582012906683734,
						"rouge2_max,none": 36.570962366414335,
						"rouge2_max_stderr,none": 1.0423242358530302,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -9.543931809407418,
						"rougeL_diff_stderr,none": 0.9851215131831138,
						"rougeL_max,none": 50.03581883945255,
						"rougeL_max_stderr,none": 0.8992774655403986
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24479804161566707,
						"acc_stderr,none": 0.015051869486715006,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3914649695348963,
						"acc_stderr,none": 0.013992555224963564,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6337524429111209,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5515954420159002,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.475592252333357,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6779794790844514,
						"acc_stderr,none": 0.013132070202071076,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.4326923076923077,
						"acc_stderr,none": 0.04881803687006195,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6176363636363635,
						"acc_stderr,none": 0.07342809337816081,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127676,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.02237429816635318,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.020143572847290802,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.02168382753928611,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936614,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936603,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43617135207496655,
						"acc_stderr,none": 0.04912194143307382,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996477,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.45943775100401607,
						"acc_stderr,none": 0.009989039874786897,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.009779967579941793,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5349397590361445,
						"acc_stderr,none": 0.009997573294114558,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4979919678714859,
						"acc_stderr,none": 0.010021992045038413,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4983935742971888,
						"acc_stderr,none": 0.01002202114110211,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702095,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542313,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3827309236947791,
						"acc_stderr,none": 0.00974252634088406,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41004016064257026,
						"acc_stderr,none": 0.009858525713807865,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44899598393574297,
						"acc_stderr,none": 0.009969793477240828,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.00985556741448024,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.009852581919032247,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3497991967871486,
						"acc_stderr,none": 0.009559181474778303,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6273389086095903,
						"acc_stderr,none": 0.060280339947664276,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.01263942942038987,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.771674387822634,
						"acc_stderr,none": 0.010802042577302275,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.011667825388305481,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332792,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252875,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.012162974996136392,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5440105890138981,
						"acc_stderr,none": 0.012817182901076038,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6796823295830576,
						"acc_stderr,none": 0.012007565507943376,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.01280099159129337,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.012664648329214084,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6379880873593646,
						"acc_stderr,none": 0.01236742376945643,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8107439874129018,
						"acc_stderr,none": 0.03588827224009229,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8653763440860215,
						"acc_stderr,none": 0.007080193677104257,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353822,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.026473487980890983,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7936507936507936,
						"acc_stderr,none": 0.01804397166082725,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk4-0_85_pth"
	},
	"./rwkv-x-dev/chunk5-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6130214205186019,
						"acc_norm,none": 0.6031567080045096,
						"acc_norm_stderr,none": 0.08977785447507212,
						"acc_stderr,none": 0.11071126929342981,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.346875,
						"acc_stderr,none": 0.014774399424495795,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8182835820895522,
						"acc_stderr,none": 0.16512384537695862,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2849248834398205,
						"acc_norm,none": 0.2849248834398205,
						"acc_norm_stderr,none": 0.04913195288311481,
						"acc_stderr,none": 0.04913195288311481,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.4796629347308241,
						"acc_stderr,none": 0.006761283317865543,
						"alias": "glue",
						"f1,none": 0.6133663563734792,
						"f1_stderr,none": 0.0004093269153353357,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7073549388705609,
						"acc_stderr,none": 0.01811942469545922,
						"alias": "lambada",
						"perplexity,none": 3.894390200134005,
						"perplexity_stderr,none": 0.26262645023678716
					},
					"lambada_multilingual": {
						"acc,none": 0.5303318455268775,
						"acc_stderr,none": 0.08694475550634134,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.848471843449293,
						"perplexity_stderr,none": 8.611074602154208
					},
					"mmlu": {
						"acc,none": 0.29582680529839056,
						"acc_stderr,none": 0.04700121528684729,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.28522848034006376,
						"acc_stderr,none": 0.04081878653306117,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.32668168651432244,
						"acc_stderr,none": 0.04177169924358086,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2970425739356516,
						"acc_stderr,none": 0.046163380699655025,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28005074532191565,
						"acc_stderr,none": 0.05334021746151728,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47271428571428575,
						"acc_stderr,none": 0.053881214201376704,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.722428966869438,
						"acc_norm,none": 0.6076877903819815,
						"acc_norm_stderr,none": 0.010137932988434151,
						"acc_stderr,none": 0.15453512761576974,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339480774326424,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518058580810754,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3951411131937714,
						"perplexity_stderr,none": 0.0667976541857992,
						"word_perplexity,none": 10.483191205954851,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3158868932479996,
						"acc_stderr,none": 0.001474964872568736,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -8.115588433072176,
						"bleu_diff_stderr,none": 0.8711074553379279,
						"bleu_max,none": 27.060720564403105,
						"bleu_max_stderr,none": 0.8067583398151325,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777298,
						"rouge1_diff,none": -9.980840511811554,
						"rouge1_diff_stderr,none": 0.9406780362050388,
						"rouge1_max,none": 52.16640618038586,
						"rouge1_max_stderr,none": 0.8772276793519004,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -12.531989484061667,
						"rouge2_diff_stderr,none": 1.1339750186564863,
						"rouge2_max,none": 35.80518403952095,
						"rouge2_max_stderr,none": 1.0368261220232144,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.62216001046842,
						"rougeL_diff_stderr,none": 0.9555474211080631,
						"rougeL_max,none": 49.328214885336386,
						"rougeL_max_stderr,none": 0.8974717976152313
					},
					"xcopa": {
						"acc,none": 0.6203636363636362,
						"acc_stderr,none": 0.07254150104079769,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.053020515567198806,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6287828650502376,
						"acc_stderr,none": 0.06155214648203959,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8109687570240504,
						"acc_stderr,none": 0.0358626719988003,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6130214205186019,
						"acc_norm,none": 0.6031567080045096,
						"acc_norm_stderr,none": 0.08977785447507212,
						"acc_stderr,none": 0.11071126929342981,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.346875,
						"acc_stderr,none": 0.014774399424495795,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.346,
						"acc_stderr,none": 0.015050266127564443,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.01512017260548369,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3425,
						"acc_stderr,none": 0.013704669762934732,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.378839590443686,
						"acc_norm,none": 0.4138225255972696,
						"acc_norm_stderr,none": 0.014392730009221007,
						"acc_stderr,none": 0.014175915490000322,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7285353535353535,
						"acc_norm,none": 0.6965488215488216,
						"acc_norm_stderr,none": 0.009433837434252272,
						"acc_stderr,none": 0.009125362970360623,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8182835820895522,
						"acc_stderr,none": 0.16512384537695862,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523719,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767684,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469354,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511958,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787726,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660014,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.015730176046009067,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369688,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256606,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571402,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380711,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.00627362402111876,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024971,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689075,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345698,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342761,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434929,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177446,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919283,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306516,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.266,
						"acc_stderr,none": 0.013979965645145146,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695792,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175308,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.01547331326585941,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515438,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796396,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936694,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323504,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400252,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229863,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.00936368937324814,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.611,
						"acc_stderr,none": 0.015424555647308496,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.539,
						"acc_stderr,none": 0.01577110420128319,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.015341165254026646,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340971,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661757,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.0074548356504067275,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578232,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032140007,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409312,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.015817749561843564,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817138,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565933,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377932,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.449,
						"acc_stderr,none": 0.015736792768752023,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651523,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246437,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.015543249100255544,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499356,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617331,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409308,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.00733517585370684,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315146,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178331,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727057,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.015395194445410806,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.304,
						"acc_stderr,none": 0.014553205687950429,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2849248834398205,
						"acc_norm,none": 0.2849248834398205,
						"acc_norm_stderr,none": 0.04913195288311481,
						"acc_stderr,none": 0.04913195288311481,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810786,
						"acc_stderr,none": 0.03304756158810786,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.037826149818120415,
						"acc_stderr,none": 0.037826149818120415,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511783,
						"acc_stderr,none": 0.03524390844511783,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.31100478468899523,
						"acc_norm,none": 0.31100478468899523,
						"acc_norm_stderr,none": 0.032096669533489795,
						"acc_stderr,none": 0.032096669533489795,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288875,
						"acc_stderr,none": 0.03976333292288875,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.29721362229102166,
						"acc_norm,none": 0.29721362229102166,
						"acc_norm_stderr,none": 0.025469363219004764,
						"acc_stderr,none": 0.025469363219004764,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.031980016601150726,
						"acc_stderr,none": 0.031980016601150726,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2905027932960894,
						"acc_norm,none": 0.2905027932960894,
						"acc_norm_stderr,none": 0.034028319367948166,
						"acc_stderr,none": 0.034028319367948166,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.0284588209914603,
						"acc_stderr,none": 0.0284588209914603,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.18867924528301888,
						"acc_norm,none": 0.18867924528301888,
						"acc_norm_stderr,none": 0.0381824426969915,
						"acc_stderr,none": 0.0381824426969915,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.047220130807712334,
						"acc_stderr,none": 0.047220130807712334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.041175810978451015,
						"acc_stderr,none": 0.041175810978451015,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371222,
						"acc_stderr,none": 0.04198857662371222,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.027065504564389525,
						"acc_stderr,none": 0.027065504564389525,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.29931972789115646,
						"acc_norm,none": 0.29931972789115646,
						"acc_norm_stderr,none": 0.03790104530910391,
						"acc_stderr,none": 0.03790104530910391,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790508,
						"acc_stderr,none": 0.028394293050790508,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365907,
						"acc_stderr,none": 0.030954055470365907,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3865546218487395,
						"acc_norm,none": 0.3865546218487395,
						"acc_norm_stderr,none": 0.0316314580755238,
						"acc_stderr,none": 0.0316314580755238,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627455,
						"acc_stderr,none": 0.03366618544627455,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.037497633645270485,
						"acc_stderr,none": 0.037497633645270485,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2288135593220339,
						"acc_norm,none": 0.2288135593220339,
						"acc_norm_stderr,none": 0.0388353872453885,
						"acc_stderr,none": 0.0388353872453885,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.19090909090909092,
						"acc_norm,none": 0.19090909090909092,
						"acc_norm_stderr,none": 0.03764425585984924,
						"acc_stderr,none": 0.03764425585984924,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.03852273364924316,
						"acc_stderr,none": 0.03852273364924316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255953,
						"acc_stderr,none": 0.03492619473255953,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2773722627737226,
						"acc_norm,none": 0.2773722627737226,
						"acc_norm_stderr,none": 0.022110415304121923,
						"acc_stderr,none": 0.022110415304121923,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35046728971962615,
						"acc_norm,none": 0.35046728971962615,
						"acc_norm_stderr,none": 0.03269147055032477,
						"acc_stderr,none": 0.03269147055032477,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2904761904761905,
						"acc_norm,none": 0.2904761904761905,
						"acc_norm_stderr,none": 0.031402600480698775,
						"acc_stderr,none": 0.031402600480698775,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03294754314388876,
						"acc_stderr,none": 0.03294754314388876,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.041678081808441535,
						"acc_stderr,none": 0.041678081808441535,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2482758620689655,
						"acc_norm,none": 0.2482758620689655,
						"acc_norm_stderr,none": 0.036001056927277716,
						"acc_stderr,none": 0.036001056927277716,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.0451367671816831,
						"acc_stderr,none": 0.0451367671816831,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290195,
						"acc_stderr,none": 0.034449526562290195,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26595744680851063,
						"acc_norm,none": 0.26595744680851063,
						"acc_norm_stderr,none": 0.022816607010135298,
						"acc_stderr,none": 0.022816607010135298,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.030982555535700885,
						"acc_stderr,none": 0.030982555535700885,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.037125378336148665,
						"acc_stderr,none": 0.037125378336148665,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2920353982300885,
						"acc_norm,none": 0.2920353982300885,
						"acc_norm_stderr,none": 0.03031323322398852,
						"acc_stderr,none": 0.03031323322398852,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.035014387062967806,
						"acc_stderr,none": 0.035014387062967806,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.034569054303762434,
						"acc_stderr,none": 0.034569054303762434,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278585,
						"acc_stderr,none": 0.03637652289278585,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.4796629347308241,
						"acc_stderr,none": 0.006761283317865543,
						"alias": "glue",
						"f1,none": 0.6133663563734792,
						"f1_stderr,none": 0.0004093269153353357,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5248954391555467,
						"acc_norm,none": 0.7090221071499702,
						"acc_norm_stderr,none": 0.004532850566893529,
						"acc_stderr,none": 0.004983592410934175,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7073549388705609,
						"acc_stderr,none": 0.01811942469545922,
						"alias": "lambada",
						"perplexity,none": 3.894390200134005,
						"perplexity_stderr,none": 0.26262645023678716
					},
					"lambada_multilingual": {
						"acc,none": 0.5303318455268775,
						"acc_stderr,none": 0.08694475550634134,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.848471843449293,
						"perplexity_stderr,none": 8.611074602154208
					},
					"lambada_openai": {
						"acc,none": 0.7420919852513099,
						"acc_stderr,none": 0.006094995125652958,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3951411131937714,
						"perplexity_stderr,none": 0.0667976541857992
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41276926062487873,
						"acc_stderr,none": 0.006859147422201019,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.35537353156207,
						"perplexity_stderr,none": 1.9962390758381041
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7413157384048127,
						"acc_stderr,none": 0.006100967149142446,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3958144585595744,
						"perplexity_stderr,none": 0.06680246280856031
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45002910925674366,
						"acc_stderr,none": 0.006931101003281443,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.316309358961604,
						"perplexity_stderr,none": 1.426554516444485
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5420143605666602,
						"acc_stderr,none": 0.006941341313928111,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.223525697418676,
						"perplexity_stderr,none": 0.8299861516447586
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5055307587812925,
						"acc_stderr,none": 0.006965551475495912,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.951336170744536,
						"perplexity_stderr,none": 1.213806335757547
					},
					"lambada_standard": {
						"acc,none": 0.673394139336309,
						"acc_stderr,none": 0.0065336930212616965,
						"alias": " - lambada_standard",
						"perplexity,none": 4.39332998677452,
						"perplexity_stderr,none": 0.0948724571203966
					},
					"logiqa": {
						"acc,none": 0.2304147465437788,
						"acc_norm,none": 0.271889400921659,
						"acc_norm_stderr,none": 0.01745171600943683,
						"acc_stderr,none": 0.016516834820590964,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.29582680529839056,
						"acc_stderr,none": 0.04700121528684729,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.035834961763610645,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3622641509433962,
						"acc_stderr,none": 0.0295822451283843,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.03414014007044036,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.04158307533083286,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.030976692998534436,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518754,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.022182037202948368,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.04134913018303316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38387096774193546,
						"acc_stderr,none": 0.027666182075539638,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.031447125816782426,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3393939393939394,
						"acc_stderr,none": 0.036974422050315967,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.30808080808080807,
						"acc_stderr,none": 0.03289477330098615,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.39378238341968913,
						"acc_stderr,none": 0.03526077095548237,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2717948717948718,
						"acc_stderr,none": 0.022556551010132358,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.026466117538959916,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.028942004040998167,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.03631329803969653,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.28623853211009176,
						"acc_stderr,none": 0.01937943662891998,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19907407407407407,
						"acc_stderr,none": 0.027232298462690232,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03308611113236436,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3080168776371308,
						"acc_stderr,none": 0.030052389335605702,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.31297709923664124,
						"acc_stderr,none": 0.04066962905677698,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.28522848034006376,
						"acc_stderr,none": 0.04081878653306117,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.32231404958677684,
						"acc_stderr,none": 0.042664163633521685,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497752,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3496932515337423,
						"acc_stderr,none": 0.03746668325470021,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.33980582524271846,
						"acc_stderr,none": 0.046897659372781335,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3547008547008547,
						"acc_stderr,none": 0.03134250486245402,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.017268607560005776,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2976878612716763,
						"acc_stderr,none": 0.024617055388676992,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.025829163272757485,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.32668168651432244,
						"acc_stderr,none": 0.04177169924358086,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3536977491961415,
						"acc_stderr,none": 0.02715520810320088,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2932098765432099,
						"acc_stderr,none": 0.025329888171900922,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290396,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2588005215123859,
						"acc_stderr,none": 0.011186109046564611,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.02725720260611495,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2908496732026144,
						"acc_stderr,none": 0.018373116915903966,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22857142857142856,
						"acc_stderr,none": 0.026882144922307744,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2970425739356516,
						"acc_stderr,none": 0.046163380699655025,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.39303482587064675,
						"acc_stderr,none": 0.0345368246603156,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28005074532191565,
						"acc_stderr,none": 0.05334021746151728,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683227,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.035993357714560276,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.33316352521650533,
						"acc_stderr,none": 0.004757900669144069,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3318755085435313,
						"acc_stderr,none": 0.004749167092841591,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7303921568627451,
						"acc_stderr,none": 0.02199617199531545,
						"alias": " - mrpc",
						"f1,none": 0.8302469135802469,
						"f1_stderr,none": 0.015933295290768014
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.049030470914127426,
						"exact_match_stderr,remove_whitespace": 0.0035943709198966063
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.022017482578127676,
						"acc_stderr,none": 0.02055326917420921,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.423,
						"acc_stderr,none": 0.0110497306878554,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.378,
						"acc_stderr,none": 0.010845128325852172,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404666,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078668,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.011183046555618484,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5075,
						"acc_stderr,none": 0.011181877847485998,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47271428571428575,
						"acc_stderr,none": 0.053881214201376704,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7600652883569097,
						"acc_norm,none": 0.7714907508161044,
						"acc_norm_stderr,none": 0.009796313511829517,
						"acc_stderr,none": 0.009963625892809545,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.722428966869438,
						"acc_norm,none": 0.6076877903819815,
						"acc_norm_stderr,none": 0.010137932988434151,
						"acc_stderr,none": 0.15453512761576974,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339480774326424,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518058580810754,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3951411131937714,
						"perplexity_stderr,none": 0.0667976541857992,
						"word_perplexity,none": 10.483191205954851,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49917627677100496,
						"acc_stderr,none": 0.006765401370838248,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5355923818946327,
						"acc_stderr,none": 0.002480392237462397,
						"alias": " - qqp",
						"f1,none": 0.6114881642112233,
						"f1_stderr,none": 0.0026122747144085587
					},
					"record": {
						"alias": "record",
						"em,none": 0.2775,
						"em_stderr,none": 0.004477876744397468,
						"f1,none": 0.2876052383452654,
						"f1_stderr,none": 0.004486808686875493
					},
					"rte": {
						"acc,none": 0.6462093862815884,
						"acc_stderr,none": 0.028780957835424687,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.945,
						"acc_norm,none": 0.915,
						"acc_norm_stderr,none": 0.008823426366942347,
						"acc_stderr,none": 0.007212976294639218,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9071100917431193,
						"acc_stderr,none": 0.009835698073987972,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3158868932479996,
						"acc_stderr,none": 0.001474964872568736,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -8.115588433072176,
						"bleu_diff_stderr,none": 0.8711074553379279,
						"bleu_max,none": 27.060720564403105,
						"bleu_max_stderr,none": 0.8067583398151325,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777298,
						"rouge1_diff,none": -9.980840511811554,
						"rouge1_diff_stderr,none": 0.9406780362050388,
						"rouge1_max,none": 52.16640618038586,
						"rouge1_max_stderr,none": 0.8772276793519004,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -12.531989484061667,
						"rouge2_diff_stderr,none": 1.1339750186564863,
						"rouge2_max,none": 35.80518403952095,
						"rouge2_max_stderr,none": 1.0368261220232144,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.62216001046842,
						"rougeL_diff_stderr,none": 0.9555474211080631,
						"rougeL_max,none": 49.328214885336386,
						"rougeL_max_stderr,none": 0.8974717976152313
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31456548347613217,
						"bleu_acc_stderr,none": 0.016255241993179178,
						"bleu_diff,none": -8.115588433072176,
						"bleu_diff_stderr,none": 0.8711074553379279,
						"bleu_max,none": 27.060720564403105,
						"bleu_max_stderr,none": 0.8067583398151325,
						"rouge1_acc,none": 0.2827417380660955,
						"rouge1_acc_stderr,none": 0.015764770836777298,
						"rouge1_diff,none": -9.980840511811554,
						"rouge1_diff_stderr,none": 0.9406780362050388,
						"rouge1_max,none": 52.16640618038586,
						"rouge1_max_stderr,none": 0.8772276793519004,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826856,
						"rouge2_diff,none": -12.531989484061667,
						"rouge2_diff_stderr,none": 1.1339750186564863,
						"rouge2_max,none": 35.80518403952095,
						"rouge2_max_stderr,none": 1.0368261220232144,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.62216001046842,
						"rougeL_diff_stderr,none": 0.9555474211080631,
						"rougeL_max,none": 49.328214885336386,
						"rougeL_max_stderr,none": 0.8974717976152313
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24479804161566707,
						"acc_stderr,none": 0.015051869486714997,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3869757448803322,
						"acc_stderr,none": 0.013973887363280826,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6339480774326424,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5518058580810754,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.483191205954851,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6724546172059984,
						"acc_stderr,none": 0.013190169546797017,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.059755502635482904,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.41346153846153844,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6203636363636362,
						"acc_stderr,none": 0.07254150104079769,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.02237429816635319,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745183,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.019486596801643382,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740862,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.02161328916516578,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.02047511809298897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530085,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.053020515567198806,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337343,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4710843373493976,
						"acc_stderr,none": 0.010005299609236084,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483489,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38393574297188754,
						"acc_stderr,none": 0.009748321202534393,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5413654618473895,
						"acc_stderr,none": 0.009987716412406594,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5016064257028112,
						"acc_stderr,none": 0.0100220211411021,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4995983935742972,
						"acc_stderr,none": 0.010022069634353845,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583415,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38714859437751004,
						"acc_stderr,none": 0.009763465328590654,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40522088353413654,
						"acc_stderr,none": 0.009840367477589288,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45502008032128516,
						"acc_stderr,none": 0.009981437307797271,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.009852581919032231,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.39879518072289155,
						"acc_stderr,none": 0.009814625416137585,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3542168674698795,
						"acc_stderr,none": 0.009586620142951845,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6287828650502376,
						"acc_stderr,none": 0.06155214648203959,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040303,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7769688947716744,
						"acc_stderr,none": 0.010712628906979183,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.01166782538830548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.012772408697979139,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252863,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6671078755790867,
						"acc_stderr,none": 0.012127221798743733,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5387160820648577,
						"acc_stderr,none": 0.01282849335327154,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6829913964262078,
						"acc_stderr,none": 0.011974424977110291,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5539377895433488,
						"acc_stderr,none": 0.012792037953589644,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040303,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139025,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8109687570240504,
						"acc_stderr,none": 0.0358626719988003,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8666666666666667,
						"acc_stderr,none": 0.007051432501634728,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7455683003128258,
						"acc_stderr,none": 0.014071700152397672,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7870722433460076,
						"acc_stderr,none": 0.025291395445662838,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7896825396825397,
						"acc_stderr,none": 0.018171046497690278,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk5-0_85_pth"
	},
	"./rwkv-x-dev/chunk6-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6118940248027057,
						"acc_norm,none": 0.5899098083427283,
						"acc_norm_stderr,none": 0.08798275858466731,
						"acc_stderr,none": 0.11217891945207423,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.346875,
						"acc_stderr,none": 0.014755441858016966,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8171492537313433,
						"acc_stderr,none": 0.17230992692014513,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.28647901916767404,
						"acc_norm,none": 0.28647901916767404,
						"acc_norm_stderr,none": 0.050274557246225864,
						"acc_stderr,none": 0.050274557246225864,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.478427227251072,
						"acc_stderr,none": 0.006574043000490415,
						"alias": "glue",
						"f1,none": 0.611608824637493,
						"f1_stderr,none": 0.0004451790155942701,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7109450805356103,
						"acc_stderr,none": 0.01698027072844337,
						"alias": "lambada",
						"perplexity,none": 3.856322751461975,
						"perplexity_stderr,none": 0.23812369544017875
					},
					"lambada_multilingual": {
						"acc,none": 0.5314185911119736,
						"acc_stderr,none": 0.0829932804405175,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.897574677159362,
						"perplexity_stderr,none": 8.42612658886791
					},
					"mmlu": {
						"acc,none": 0.3012391397236861,
						"acc_stderr,none": 0.05136502401683466,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2879914984059511,
						"acc_stderr,none": 0.041994425715844975,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.33859028001287417,
						"acc_stderr,none": 0.04914440674842426,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3067923301917452,
						"acc_stderr,none": 0.050380435586824285,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2787821122740247,
						"acc_stderr,none": 0.05591343384805093,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47828571428571426,
						"acc_stderr,none": 0.0499793595859735,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7226932236450248,
						"acc_norm,none": 0.5948231683245433,
						"acc_norm_stderr,none": 0.009884300653715268,
						"acc_stderr,none": 0.15995504154556037,
						"alias": "pythia",
						"bits_per_byte,none": 0.6351280330753049,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530755727664614,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.406426465819982,
						"perplexity_stderr,none": 0.06704743442173354,
						"word_perplexity,none": 10.529140780319349,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.31445747037528976,
						"acc_stderr,none": 0.0014673143279740634,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.01628920337440338,
						"bleu_diff,none": -7.9053171764337655,
						"bleu_diff_stderr,none": 0.8617889123250447,
						"bleu_max,none": 27.104666977072068,
						"bleu_max_stderr,none": 0.8085762180228528,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.015680929364024643,
						"rouge1_diff,none": -10.123536950929477,
						"rouge1_diff_stderr,none": 0.9186378607102484,
						"rouge1_max,none": 52.031208472926004,
						"rouge1_max_stderr,none": 0.8808710259528939,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485088,
						"rouge2_diff,none": -12.289989382651818,
						"rouge2_diff_stderr,none": 1.1161969921345642,
						"rouge2_max,none": 36.02972585207246,
						"rouge2_max_stderr,none": 1.0310254580395297,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.485703965053574,
						"rougeL_diff_stderr,none": 0.9349583258603763,
						"rougeL_max,none": 49.35487516828091,
						"rougeL_max_stderr,none": 0.8997282130770669
					},
					"xcopa": {
						"acc,none": 0.6196363636363635,
						"acc_stderr,none": 0.0703164295227265,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43467202141900935,
						"acc_stderr,none": 0.05102073221326862,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6234883581011974,
						"acc_stderr,none": 0.055877185698984616,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8107439874129018,
						"acc_stderr,none": 0.043607796884406386,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6118940248027057,
						"acc_norm,none": 0.5899098083427283,
						"acc_norm_stderr,none": 0.08798275858466731,
						"acc_stderr,none": 0.11217891945207423,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.346875,
						"acc_stderr,none": 0.014755441858016966,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316403,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3425,
						"acc_stderr,none": 0.013704669762934725,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.37457337883959047,
						"acc_norm,none": 0.4044368600682594,
						"acc_norm_stderr,none": 0.014342036483436174,
						"acc_stderr,none": 0.014144193471893446,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7289562289562289,
						"acc_norm,none": 0.6813973063973064,
						"acc_norm_stderr,none": 0.009560775507673362,
						"acc_stderr,none": 0.0091209197417606,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8171492537313433,
						"acc_stderr,none": 0.17230992692014513,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403638,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448817,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.01343445140243869,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085332,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.01060525678479656,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256569,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474913,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571408,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033854,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787745,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487895,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178334,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037185,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.01468399195108796,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.014142984975740671,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515427,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832349,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.01053479862085574,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178351,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.232,
						"acc_stderr,none": 0.013354937452281581,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340997,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.01258369378796814,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.015417317979911074,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.01335493745228157,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280301,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946094,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795023,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.015663503610155283,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.015258073561521798,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.01101091459599244,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653886,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785115,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307789,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474915,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469428,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541665,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.0158161357527732,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792942,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029863,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575022,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0157049879543618,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523719,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.00715088352129544,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066534,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110866,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837961,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617328,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.0073953154557929324,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031512,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306481,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.0153801023256527,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.309,
						"acc_stderr,none": 0.014619600977206486,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.28647901916767404,
						"acc_norm,none": 0.28647901916767404,
						"acc_norm_stderr,none": 0.050274557246225864,
						"acc_stderr,none": 0.050274557246225864,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548536,
						"acc_stderr,none": 0.03433919627548536,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653356,
						"acc_stderr,none": 0.033346454086653356,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.038518021388670956,
						"acc_stderr,none": 0.038518021388670956,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3349282296650718,
						"acc_norm,none": 0.3349282296650718,
						"acc_norm_stderr,none": 0.032724910430512406,
						"acc_stderr,none": 0.032724910430512406,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288875,
						"acc_stderr,none": 0.03976333292288875,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.045568376936747736,
						"acc_stderr,none": 0.045568376936747736,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30030959752321984,
						"acc_norm,none": 0.30030959752321984,
						"acc_norm_stderr,none": 0.025545218898401938,
						"acc_stderr,none": 0.025545218898401938,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399812,
						"acc_stderr,none": 0.03166009679399812,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328524,
						"acc_stderr,none": 0.03383195081328524,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.26582278481012656,
						"acc_norm,none": 0.26582278481012656,
						"acc_norm_stderr,none": 0.02875679962965834,
						"acc_stderr,none": 0.02875679962965834,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16037735849056603,
						"acc_norm,none": 0.16037735849056603,
						"acc_norm_stderr,none": 0.035811206196910764,
						"acc_stderr,none": 0.035811206196910764,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004224,
						"acc_stderr,none": 0.04742907046004224,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.027285147081637318,
						"acc_stderr,none": 0.027285147081637318,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.039089144792915614,
						"acc_stderr,none": 0.039089144792915614,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2822085889570552,
						"acc_norm,none": 0.2822085889570552,
						"acc_norm_stderr,none": 0.03536117886664742,
						"acc_stderr,none": 0.03536117886664742,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.02874673063268137,
						"acc_stderr,none": 0.02874673063268137,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.0307463007421245,
						"acc_stderr,none": 0.0307463007421245,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3907563025210084,
						"acc_norm,none": 0.3907563025210084,
						"acc_norm_stderr,none": 0.03169380235712997,
						"acc_stderr,none": 0.03169380235712997,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.25217391304347825,
						"acc_norm,none": 0.25217391304347825,
						"acc_norm_stderr,none": 0.02869674529449335,
						"acc_stderr,none": 0.02869674529449335,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.0394188850126319,
						"acc_stderr,none": 0.0394188850126319,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03744254928577061,
						"acc_stderr,none": 0.03744254928577061,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714262,
						"acc_stderr,none": 0.04025566684714262,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.037950002128017815,
						"acc_stderr,none": 0.037950002128017815,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604672,
						"acc_stderr,none": 0.03893259610604672,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0316293039569795,
						"acc_stderr,none": 0.0316293039569795,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.03549043982227172,
						"acc_stderr,none": 0.03549043982227172,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2846715328467153,
						"acc_norm,none": 0.2846715328467153,
						"acc_norm_stderr,none": 0.02228603692971729,
						"acc_stderr,none": 0.02228603692971729,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.35046728971962615,
						"acc_norm,none": 0.35046728971962615,
						"acc_norm_stderr,none": 0.03269147055032478,
						"acc_stderr,none": 0.03269147055032478,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208567,
						"acc_stderr,none": 0.04119323030208567,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.0420996926731014,
						"acc_stderr,none": 0.0420996926731014,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.03184006730473941,
						"acc_stderr,none": 0.03184006730473941,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732732,
						"acc_stderr,none": 0.03460236918732732,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2804232804232804,
						"acc_norm,none": 0.2804232804232804,
						"acc_norm_stderr,none": 0.03276171742795849,
						"acc_stderr,none": 0.03276171742795849,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003337,
						"acc_stderr,none": 0.03752833958003337,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.0451367671816831,
						"acc_stderr,none": 0.0451367671816831,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.033370375852212746,
						"acc_stderr,none": 0.033370375852212746,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26595744680851063,
						"acc_norm,none": 0.26595744680851063,
						"acc_norm_stderr,none": 0.02281660701013529,
						"acc_stderr,none": 0.02281660701013529,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.030439132051887946,
						"acc_stderr,none": 0.030439132051887946,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03820169914517905,
						"acc_stderr,none": 0.03820169914517905,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2920353982300885,
						"acc_norm,none": 0.2920353982300885,
						"acc_norm_stderr,none": 0.030313233223988538,
						"acc_stderr,none": 0.030313233223988538,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624335,
						"acc_stderr,none": 0.03546563019624335,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.0361637928646202,
						"acc_stderr,none": 0.0361637928646202,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.478427227251072,
						"acc_stderr,none": 0.006574043000490415,
						"alias": "glue",
						"f1,none": 0.611608824637493,
						"f1_stderr,none": 0.0004451790155942701,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5248954391555467,
						"acc_norm,none": 0.7081258713403704,
						"acc_norm_stderr,none": 0.004536955796510542,
						"acc_stderr,none": 0.0049835924109341645,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7109450805356103,
						"acc_stderr,none": 0.01698027072844337,
						"alias": "lambada",
						"perplexity,none": 3.856322751461975,
						"perplexity_stderr,none": 0.23812369544017875
					},
					"lambada_multilingual": {
						"acc,none": 0.5314185911119736,
						"acc_stderr,none": 0.0829932804405175,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.897574677159362,
						"perplexity_stderr,none": 8.42612658886791
					},
					"lambada_openai": {
						"acc,none": 0.7424801086745585,
						"acc_stderr,none": 0.006091999719129261,
						"alias": " - lambada_openai",
						"perplexity,none": 3.406426465819982,
						"perplexity_stderr,none": 0.06704743442173354
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4139336308946245,
						"acc_stderr,none": 0.006862001830409199,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.35379606382931,
						"perplexity_stderr,none": 2.008236491704191
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7420919852513099,
						"acc_stderr,none": 0.006094995125652968,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.407790602586324,
						"perplexity_stderr,none": 0.06707722568448932
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4513875412381137,
						"acc_stderr,none": 0.00693297588836862,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.344677182932987,
						"perplexity_stderr,none": 1.427735591059868
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5451193479526489,
						"acc_stderr,none": 0.006937557627205973,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.251332170249267,
						"perplexity_stderr,none": 0.8338510485478287
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.504560450223171,
						"acc_stderr,none": 0.006965687898451474,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.13027736619891,
						"perplexity_stderr,none": 1.2202229201521486
					},
					"lambada_standard": {
						"acc,none": 0.6794100523966622,
						"acc_stderr,none": 0.006502090459040077,
						"alias": " - lambada_standard",
						"perplexity,none": 4.304406741074659,
						"perplexity_stderr,none": 0.09220385628285603
					},
					"logiqa": {
						"acc,none": 0.2350230414746544,
						"acc_norm,none": 0.271889400921659,
						"acc_norm_stderr,none": 0.01745171600943683,
						"acc_stderr,none": 0.01663116682389096,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3012391397236861,
						"acc_stderr,none": 0.05136502401683466,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.38113207547169814,
						"acc_stderr,none": 0.02989060968628662,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816508,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3063583815028902,
						"acc_stderr,none": 0.03514942551267439,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171453,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.31063829787234043,
						"acc_stderr,none": 0.03025123757921317,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.037245636197746325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2275132275132275,
						"acc_stderr,none": 0.021591269407823768,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3967741935483871,
						"acc_stderr,none": 0.027831231605767944,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.0309037969521145,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.37575757575757573,
						"acc_stderr,none": 0.03781887353205983,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03358618145732523,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.38341968911917096,
						"acc_stderr,none": 0.03508984236295342,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2743589743589744,
						"acc_stderr,none": 0.02262276576749322,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712166,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.029079374539480007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.03658603262763743,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3192660550458716,
						"acc_stderr,none": 0.01998782906975002,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093936,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3627450980392157,
						"acc_stderr,none": 0.03374499356319355,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.32489451476793246,
						"acc_stderr,none": 0.030486039389105293,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.36771300448430494,
						"acc_stderr,none": 0.03236198350928276,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.32061068702290074,
						"acc_stderr,none": 0.04093329229834278,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2879914984059511,
						"acc_stderr,none": 0.041994425715844975,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2975206611570248,
						"acc_stderr,none": 0.04173349148083499,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.045879047413018105,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.32515337423312884,
						"acc_stderr,none": 0.03680350371286461,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.3300970873786408,
						"acc_stderr,none": 0.0465614711001235,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.36324786324786323,
						"acc_stderr,none": 0.03150712523091264,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.39719029374201786,
						"acc_stderr,none": 0.017497905037159367,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.29190751445086704,
						"acc_stderr,none": 0.02447699407624732,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574898,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30718954248366015,
						"acc_stderr,none": 0.026415601914388995,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.33859028001287417,
						"acc_stderr,none": 0.04914440674842426,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.35691318327974275,
						"acc_stderr,none": 0.027210420375934016,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.30864197530864196,
						"acc_stderr,none": 0.025702640260603756,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902002,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2561929595827901,
						"acc_stderr,none": 0.01114917315311058,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27205882352941174,
						"acc_stderr,none": 0.027033041151681456,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.01860755213127983,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20816326530612245,
						"acc_stderr,none": 0.025991117672813296,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3067923301917452,
						"acc_stderr,none": 0.050380435586824285,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.39303482587064675,
						"acc_stderr,none": 0.0345368246603156,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2787821122740247,
						"acc_stderr,none": 0.05591343384805093,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.036643147772880864,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3567251461988304,
						"acc_stderr,none": 0.036740130028609534,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3355068772287315,
						"acc_stderr,none": 0.004766207347590958,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3318755085435313,
						"acc_stderr,none": 0.0047491670928415915,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7450980392156863,
						"acc_stderr,none": 0.02160210604737706,
						"alias": " - mrpc",
						"f1,none": 0.838006230529595,
						"f1_stderr,none": 0.015668560640891917
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.03656509695290859,
						"exact_match_stderr,remove_whitespace": 0.0031242883650895257
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.022017482578127676,
						"acc_stderr,none": 0.020553269174209205,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.011069813475627658,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.010839476330688819,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4075,
						"acc_stderr,none": 0.010990098549743105,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886325,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884881,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5165,
						"acc_stderr,none": 0.011177045144808287,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47828571428571426,
						"acc_stderr,none": 0.0499793595859735,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7611534276387377,
						"acc_norm,none": 0.7682263329706203,
						"acc_norm_stderr,none": 0.00984514377279403,
						"acc_stderr,none": 0.009948120385337506,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7226932236450248,
						"acc_norm,none": 0.5948231683245433,
						"acc_norm_stderr,none": 0.009884300653715268,
						"acc_stderr,none": 0.15995504154556037,
						"alias": "pythia",
						"bits_per_byte,none": 0.6351280330753049,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530755727664614,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.406426465819982,
						"perplexity_stderr,none": 0.06704743442173354,
						"word_perplexity,none": 10.529140780319349,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49899322716456157,
						"acc_stderr,none": 0.006765396837036612,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5328716299777393,
						"acc_stderr,none": 0.0024813208331079402,
						"alias": " - qqp",
						"f1,none": 0.6096482162787814,
						"f1_stderr,none": 0.0026150433094118963
					},
					"record": {
						"alias": "record",
						"em,none": 0.273,
						"em_stderr,none": 0.004455231184628843,
						"f1,none": 0.28315523835122586,
						"f1_stderr,none": 0.004465563225262287
					},
					"rte": {
						"acc,none": 0.6534296028880866,
						"acc_stderr,none": 0.028644456994557532,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.941,
						"acc_norm,none": 0.909,
						"acc_norm_stderr,none": 0.00909954953840024,
						"acc_stderr,none": 0.007454835650406727,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9002293577981652,
						"acc_stderr,none": 0.010154741963033096,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.31445747037528976,
						"acc_stderr,none": 0.0014673143279740634,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.01628920337440338,
						"bleu_diff,none": -7.9053171764337655,
						"bleu_diff_stderr,none": 0.8617889123250447,
						"bleu_max,none": 27.104666977072068,
						"bleu_max_stderr,none": 0.8085762180228528,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.015680929364024643,
						"rouge1_diff,none": -10.123536950929477,
						"rouge1_diff_stderr,none": 0.9186378607102484,
						"rouge1_max,none": 52.031208472926004,
						"rouge1_max_stderr,none": 0.8808710259528939,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485088,
						"rouge2_diff,none": -12.289989382651818,
						"rouge2_diff_stderr,none": 1.1161969921345642,
						"rouge2_max,none": 36.02972585207246,
						"rouge2_max_stderr,none": 1.0310254580395297,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.485703965053574,
						"rougeL_diff_stderr,none": 0.9349583258603763,
						"rougeL_max,none": 49.35487516828091,
						"rougeL_max_stderr,none": 0.8997282130770669
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31701346389228885,
						"bleu_acc_stderr,none": 0.01628920337440338,
						"bleu_diff,none": -7.9053171764337655,
						"bleu_diff_stderr,none": 0.8617889123250447,
						"bleu_max,none": 27.104666977072068,
						"bleu_max_stderr,none": 0.8085762180228528,
						"rouge1_acc,none": 0.2778457772337821,
						"rouge1_acc_stderr,none": 0.015680929364024643,
						"rouge1_diff,none": -10.123536950929477,
						"rouge1_diff_stderr,none": 0.9186378607102484,
						"rouge1_max,none": 52.031208472926004,
						"rouge1_max_stderr,none": 0.8808710259528939,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485088,
						"rouge2_diff,none": -12.289989382651818,
						"rouge2_diff_stderr,none": 1.1161969921345642,
						"rouge2_max,none": 36.02972585207246,
						"rouge2_max_stderr,none": 1.0310254580395297,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.015744027248256055,
						"rougeL_diff,none": -10.485703965053574,
						"rougeL_diff_stderr,none": 0.9349583258603763,
						"rougeL_max,none": 49.35487516828091,
						"rougeL_max_stderr,none": 0.8997282130770669
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24357405140758873,
						"acc_stderr,none": 0.01502635482491078,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38534088934299077,
						"acc_stderr,none": 0.013975952239394244,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6351280330753049,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5530755727664614,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.529140780319349,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6732438831886346,
						"acc_stderr,none": 0.013181997302131354,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5492957746478874,
						"acc_stderr,none": 0.05947027187737999,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.46153846153846156,
						"acc_stderr,none": 0.04912048887947828,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6196363636363635,
						"acc_stderr,none": 0.0703164295227265,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.02200091089387719,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.020186703693570847,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.019684688820194713,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.02203367799374086,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.02161328916516578,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936596,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.02051442622562805,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43467202141900935,
						"acc_stderr,none": 0.05102073221326862,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177143,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4718875502008032,
						"acc_stderr,none": 0.010006219242553599,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4847389558232932,
						"acc_stderr,none": 0.010017403508578986,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3859437751004016,
						"acc_stderr,none": 0.00975783884206334,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5337349397590362,
						"acc_stderr,none": 0.009999235684721606,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5016064257028112,
						"acc_stderr,none": 0.010022021141102094,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.506425702811245,
						"acc_stderr,none": 0.010021245217159382,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42771084337349397,
						"acc_stderr,none": 0.009916774564942348,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.491566265060241,
						"acc_stderr,none": 0.010020647068114176,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38473895582329315,
						"acc_stderr,none": 0.009752149307152517,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.39598393574297186,
						"acc_stderr,none": 0.009802809888502344,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45542168674698796,
						"acc_stderr,none": 0.00998216114757631,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39839357429718875,
						"acc_stderr,none": 0.00981295816527095,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3927710843373494,
						"acc_stderr,none": 0.009788891787583067,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3526104417670683,
						"acc_stderr,none": 0.009576746271768752,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6234883581011974,
						"acc_stderr,none": 0.055877185698984616,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607806,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7822634017207147,
						"acc_stderr,none": 0.010620714860047854,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7088021178027796,
						"acc_stderr,none": 0.011691443511878188,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279766,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907705,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6578424884182661,
						"acc_stderr,none": 0.012209152707472833,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5195234943745863,
						"acc_stderr,none": 0.01285731253183685,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6743878226340172,
						"acc_stderr,none": 0.012059150226422295,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.012799246690109742,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.587028457974851,
						"acc_stderr,none": 0.012670716290966727,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.01235759268213903,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8107439874129018,
						"acc_stderr,none": 0.043607796884406386,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8683870967741936,
						"acc_stderr,none": 0.007012741874121946,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109369,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7382690302398331,
						"acc_stderr,none": 0.014202085663400704,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7984790874524715,
						"acc_stderr,none": 0.02478227592096154,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.026338570219814044,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7837301587301587,
						"acc_stderr,none": 0.01835681232408577,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk6-0_85_pth"
	},
	"./rwkv-x-dev/chunk7-1-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6141488162344984,
						"acc_norm,none": 0.5980834272829764,
						"acc_norm_stderr,none": 0.09099416542879085,
						"acc_stderr,none": 0.11123797876888815,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.014852252997814537,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8186417910447761,
						"acc_stderr,none": 0.17013596047303364,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29675358314626143,
						"acc_norm,none": 0.29675358314626143,
						"acc_norm_stderr,none": 0.052571934571443074,
						"acc_stderr,none": 0.052571934571443074,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5134846932532218,
						"acc_stderr,none": 0.10171378009864455,
						"alias": "glue",
						"f1,none": 0.6386656348077252,
						"f1_stderr,none": 0.0003387753790175794,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7085193091403066,
						"acc_stderr,none": 0.01694109004115522,
						"alias": "lambada",
						"perplexity,none": 3.9014703018235384,
						"perplexity_stderr,none": 0.2594797241265247
					},
					"lambada_multilingual": {
						"acc,none": 0.5309528430040753,
						"acc_stderr,none": 0.08639178331714076,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.69071858731384,
						"perplexity_stderr,none": 8.552988222187404
					},
					"mmlu": {
						"acc,none": 0.2947585813986612,
						"acc_stderr,none": 0.04917511800903342,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2824654622741765,
						"acc_stderr,none": 0.040870384074638735,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3324750563244287,
						"acc_stderr,none": 0.04757686754396759,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29216769580760477,
						"acc_stderr,none": 0.04434698500716479,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.278464954012052,
						"acc_stderr,none": 0.05600175119809852,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.47764285714285715,
						"acc_stderr,none": 0.04847792303759187,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7229046473512126,
						"acc_norm,none": 0.6028010843815743,
						"acc_norm_stderr,none": 0.0103565013317006,
						"acc_stderr,none": 0.1580975061550171,
						"alias": "pythia",
						"bits_per_byte,none": 0.6354231617249144,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533933141943035,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.409767320201968,
						"perplexity_stderr,none": 0.06717001201762643,
						"word_perplexity,none": 10.540665068527451,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3095892224575064,
						"acc_stderr,none": 0.0014655154419428882,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.687369852615783,
						"bleu_diff_stderr,none": 0.8684570990002328,
						"bleu_max,none": 26.846250747826357,
						"bleu_max_stderr,none": 0.8068451098676166,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.078552930908804,
						"rouge1_diff_stderr,none": 0.9205243229676898,
						"rouge1_max,none": 51.74706804161411,
						"rouge1_max_stderr,none": 0.8810481797202292,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062142,
						"rouge2_diff,none": -13.418690797496167,
						"rouge2_diff_stderr,none": 1.1153164657530568,
						"rouge2_max,none": 35.49351791798558,
						"rouge2_max_stderr,none": 1.032756634715349,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.544212877659202,
						"rougeL_diff_stderr,none": 0.9352180265777663,
						"rougeL_max,none": 48.98540483001104,
						"rougeL_max_stderr,none": 0.9008156163507788
					},
					"xcopa": {
						"acc,none": 0.6176363636363637,
						"acc_stderr,none": 0.06535555046798423,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43295850066934405,
						"acc_stderr,none": 0.051432181581821335,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6267372600926537,
						"acc_stderr,none": 0.05526328579177065,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8118678354686446,
						"acc_stderr,none": 0.03618077047283569,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6141488162344984,
						"acc_norm,none": 0.5980834272829764,
						"acc_norm_stderr,none": 0.09099416542879085,
						"acc_stderr,none": 0.11123797876888815,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.014852252997814537,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.357,
						"acc_stderr,none": 0.015158521721486767,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311012,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.378839590443686,
						"acc_norm,none": 0.4061433447098976,
						"acc_norm_stderr,none": 0.014351656690097856,
						"acc_stderr,none": 0.014175915490000322,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7302188552188552,
						"acc_norm,none": 0.6927609427609428,
						"acc_norm_stderr,none": 0.009466688832475378,
						"acc_stderr,none": 0.009107527914671064,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8186417910447761,
						"acc_stderr,none": 0.17013596047303364,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478324,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597509,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340973,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.01356964019917745,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.015743152379585533,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281584,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469293,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036376,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792942,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380705,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118751,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499669,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244085,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.00453647215130647,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722695,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.729,
						"acc_stderr,none": 0.014062601350986186,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992452,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306508,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.265,
						"acc_stderr,none": 0.013963164754809949,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621235,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707366,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.015431725053866613,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.01321172015861476,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919306,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849874,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491122,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.015070604603768408,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.015723301886760938,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493384,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.01569721001969469,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783241,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.01506047203170662,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524306,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847162,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.01325317496476394,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406726,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578237,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942295,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597505,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571411,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099816,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296186,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.015723301886760944,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380705,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108663,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108665,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.01281855355784398,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098694,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406728,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315119,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611486,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178364,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.015431725053866615,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.306,
						"acc_stderr,none": 0.014580006055436964,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29675358314626143,
						"acc_norm,none": 0.29675358314626143,
						"acc_norm_stderr,none": 0.052571934571443074,
						"acc_stderr,none": 0.052571934571443074,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928734,
						"acc_stderr,none": 0.03744626397928734,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.03851802138867094,
						"acc_stderr,none": 0.03851802138867094,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745932,
						"acc_stderr,none": 0.03283906353745932,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467765,
						"acc_stderr,none": 0.03980066246467765,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449114,
						"acc_stderr,none": 0.04648144634449114,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.026038038744338656,
						"acc_stderr,none": 0.026038038744338656,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.03114557065948678,
						"acc_stderr,none": 0.03114557065948678,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2569832402234637,
						"acc_norm,none": 0.2569832402234637,
						"acc_norm_stderr,none": 0.03275229252356166,
						"acc_stderr,none": 0.03275229252356166,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2742616033755274,
						"acc_norm,none": 0.2742616033755274,
						"acc_norm_stderr,none": 0.029041333510598035,
						"acc_stderr,none": 0.029041333510598035,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16981132075471697,
						"acc_norm,none": 0.16981132075471697,
						"acc_norm_stderr,none": 0.036641823111517896,
						"acc_stderr,none": 0.036641823111517896,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.038260763248848646,
						"acc_stderr,none": 0.038260763248848646,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055044,
						"acc_stderr,none": 0.04232473532055044,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763312,
						"acc_stderr,none": 0.04252016223763312,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27106227106227104,
						"acc_norm,none": 0.27106227106227104,
						"acc_norm_stderr,none": 0.026952266920703325,
						"acc_stderr,none": 0.026952266920703325,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.033086111132364364,
						"acc_stderr,none": 0.033086111132364364,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.033014059469872514,
						"acc_stderr,none": 0.033014059469872514,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.036538475108960564,
						"acc_stderr,none": 0.036538475108960564,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.33093525179856115,
						"acc_norm,none": 0.33093525179856115,
						"acc_norm_stderr,none": 0.040055858725395806,
						"acc_stderr,none": 0.040055858725395806,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.036429145782924055,
						"acc_stderr,none": 0.036429145782924055,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.034516288762506196,
						"acc_stderr,none": 0.034516288762506196,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.028968481368260034,
						"acc_stderr,none": 0.028968481368260034,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.03154449888270285,
						"acc_stderr,none": 0.03154449888270285,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2565217391304348,
						"acc_norm,none": 0.2565217391304348,
						"acc_norm_stderr,none": 0.02885881431530564,
						"acc_stderr,none": 0.02885881431530564,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.0394188850126319,
						"acc_stderr,none": 0.0394188850126319,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.033443528500791256,
						"acc_stderr,none": 0.033443528500791256,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552484,
						"acc_stderr,none": 0.03703667194552484,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.04110070549339208,
						"acc_stderr,none": 0.04110070549339208,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.040139645540727735,
						"acc_stderr,none": 0.040139645540727735,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0404061017820884,
						"acc_stderr,none": 0.0404061017820884,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.035490439822271715,
						"acc_stderr,none": 0.035490439822271715,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.30656934306569344,
						"acc_norm,none": 0.30656934306569344,
						"acc_norm_stderr,none": 0.02277057263105725,
						"acc_stderr,none": 0.02277057263105725,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3598130841121495,
						"acc_norm,none": 0.3598130841121495,
						"acc_norm_stderr,none": 0.03288531991318828,
						"acc_stderr,none": 0.03288531991318828,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.040849837332392225,
						"acc_stderr,none": 0.040849837332392225,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3442622950819672,
						"acc_norm,none": 0.3442622950819672,
						"acc_norm_stderr,none": 0.04319337331204006,
						"acc_stderr,none": 0.04319337331204006,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.031840067304739414,
						"acc_stderr,none": 0.031840067304739414,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.34444444444444444,
						"acc_norm,none": 0.34444444444444444,
						"acc_norm_stderr,none": 0.03551712696743982,
						"acc_stderr,none": 0.03551712696743982,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.033949216164478796,
						"acc_stderr,none": 0.033949216164478796,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.0428025479250546,
						"acc_stderr,none": 0.0428025479250546,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.32413793103448274,
						"acc_norm,none": 0.32413793103448274,
						"acc_norm_stderr,none": 0.03900432069185554,
						"acc_stderr,none": 0.03900432069185554,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098204,
						"acc_stderr,none": 0.032888897342098204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846662,
						"acc_stderr,none": 0.030469670650846662,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056256,
						"acc_stderr,none": 0.022593550801056256,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.31896551724137934,
						"acc_norm,none": 0.31896551724137934,
						"acc_norm_stderr,none": 0.03066552670940149,
						"acc_stderr,none": 0.03066552670940149,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3185840707964602,
						"acc_norm,none": 0.3185840707964602,
						"acc_norm_stderr,none": 0.031061820840326125,
						"acc_stderr,none": 0.031061820840326125,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.035014387062967806,
						"acc_stderr,none": 0.035014387062967806,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.033695536918777164,
						"acc_stderr,none": 0.033695536918777164,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.035608465375867336,
						"acc_stderr,none": 0.035608465375867336,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278587,
						"acc_stderr,none": 0.03637652289278587,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5134846932532218,
						"acc_stderr,none": 0.10171378009864455,
						"alias": "glue",
						"f1,none": 0.6386656348077252,
						"f1_stderr,none": 0.0003387753790175794,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5259908384783908,
						"acc_norm,none": 0.7092212706632145,
						"acc_norm_stderr,none": 0.004531935391507,
						"acc_stderr,none": 0.004983035420235718,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7085193091403066,
						"acc_stderr,none": 0.01694109004115522,
						"alias": "lambada",
						"perplexity,none": 3.9014703018235384,
						"perplexity_stderr,none": 0.2594797241265247
					},
					"lambada_multilingual": {
						"acc,none": 0.5309528430040753,
						"acc_stderr,none": 0.08639178331714076,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.69071858731384,
						"perplexity_stderr,none": 8.552988222187404
					},
					"lambada_openai": {
						"acc,none": 0.7403454298466913,
						"acc_stderr,none": 0.006108397042730497,
						"alias": " - lambada_openai",
						"perplexity,none": 3.409767320201968,
						"perplexity_stderr,none": 0.06717001201762643
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4137395691830002,
						"acc_stderr,none": 0.006861528841487101,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.165077339416754,
						"perplexity_stderr,none": 1.9861512140381428
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.740151368135067,
						"acc_stderr,none": 0.006109878348081184,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4089509612043556,
						"perplexity_stderr,none": 0.06710764637085131
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45158160294973804,
						"acc_stderr,none": 0.006933239470474417,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.05924687932952,
						"perplexity_stderr,none": 1.4164218796436234
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5466718416456433,
						"acc_stderr,none": 0.006935563830841054,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.083069818920887,
						"perplexity_stderr,none": 0.8255688660062247
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.502619833106928,
						"acc_stderr,none": 0.006965882034205061,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.73724793769769,
						"perplexity_stderr,none": 1.2075262907685957
					},
					"lambada_standard": {
						"acc,none": 0.6770813118571706,
						"acc_stderr,none": 0.006514469814384397,
						"alias": " - lambada_standard",
						"perplexity,none": 4.393487376040846,
						"perplexity_stderr,none": 0.09539679501144781
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825187,
						"acc_stderr,none": 0.016815676206479526,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2947585813986612,
						"acc_stderr,none": 0.04917511800903342,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036846,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.040943762699967926,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03583496176361064,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3622641509433962,
						"acc_stderr,none": 0.029582245128384303,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357334,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.18627450980392157,
						"acc_stderr,none": 0.03873958714149351,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.03097669299853443,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518752,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.037245636197746325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23544973544973544,
						"acc_stderr,none": 0.021851509822031708,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3741935483870968,
						"acc_stderr,none": 0.027528904299845783,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03144712581678243,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.03793713171165634,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3282828282828283,
						"acc_stderr,none": 0.03345678422756777,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.36787564766839376,
						"acc_stderr,none": 0.03480175668466036,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.258974358974359,
						"acc_stderr,none": 0.02221110681006166,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.026067159222275805,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.028942004040998167,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2913907284768212,
						"acc_stderr,none": 0.037101857261199946,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.30091743119266057,
						"acc_stderr,none": 0.019664751366802114,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19444444444444445,
						"acc_stderr,none": 0.026991454502036716,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3284313725490196,
						"acc_stderr,none": 0.032962451101722294,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29535864978902954,
						"acc_stderr,none": 0.029696338713422882,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3811659192825112,
						"acc_stderr,none": 0.03259625118416827,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2900763358778626,
						"acc_stderr,none": 0.03980066246467766,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2824654622741765,
						"acc_stderr,none": 0.040870384074638735,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.30578512396694213,
						"acc_stderr,none": 0.042059539338841226,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.037311335196738925,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.32038834951456313,
						"acc_stderr,none": 0.0462028408228004,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.36752136752136755,
						"acc_stderr,none": 0.03158539157745636,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.39719029374201786,
						"acc_stderr,none": 0.017497905037159367,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2832369942196532,
						"acc_stderr,none": 0.024257901705323385,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103986,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.28104575163398693,
						"acc_stderr,none": 0.025738854797818723,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3324750563244287,
						"acc_stderr,none": 0.04757686754396759,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3440514469453376,
						"acc_stderr,none": 0.02698147804364802,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2993827160493827,
						"acc_stderr,none": 0.025483115601195466,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.026244920349843007,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.26010430247718386,
						"acc_stderr,none": 0.01120438288782383,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.02725720260611495,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.018433427649401906,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670237,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866764,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29216769580760477,
						"acc_stderr,none": 0.04434698500716479,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.34328358208955223,
						"acc_stderr,none": 0.03357379665433431,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.278464954012052,
						"acc_stderr,none": 0.05600175119809852,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.03629335329947859,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.03565079670708312,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3403973509933775,
						"acc_stderr,none": 0.004783119756674966,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3391985353946298,
						"acc_stderr,none": 0.0047748928966867485,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7450980392156863,
						"acc_stderr,none": 0.02160210604737706,
						"alias": " - mrpc",
						"f1,none": 0.8375,
						"f1_stderr,none": 0.015782920851466743
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.037396121883656507,
						"exact_match_stderr,remove_whitespace": 0.0031582292087544163
					},
					"openbookqa": {
						"acc,none": 0.294,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020395095484936617,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4205,
						"acc_stderr,none": 0.011040870681821415,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3895,
						"acc_stderr,none": 0.010906619649373084,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4015,
						"acc_stderr,none": 0.010963985565921716,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886102,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.01112249319745627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.011173268141438304,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5115,
						"acc_stderr,none": 0.011180177690296084,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47764285714285715,
						"acc_stderr,none": 0.04847792303759187,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7627856365614799,
						"acc_norm,none": 0.7693144722524483,
						"acc_norm_stderr,none": 0.00982895955098308,
						"acc_stderr,none": 0.00992469493358635,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7229046473512126,
						"acc_norm,none": 0.6028010843815743,
						"acc_norm_stderr,none": 0.0103565013317006,
						"acc_stderr,none": 0.1580975061550171,
						"alias": "pythia",
						"bits_per_byte,none": 0.6354231617249144,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533933141943035,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.409767320201968,
						"perplexity_stderr,none": 0.06717001201762643,
						"word_perplexity,none": 10.540665068527451,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49789492952590153,
						"acc_stderr,none": 0.006765350592089551,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5884739055157061,
						"acc_stderr,none": 0.0024474610815998607,
						"alias": " - qqp",
						"f1,none": 0.6369785302845173,
						"f1_stderr,none": 0.0026272729123894703
					},
					"record": {
						"alias": "record",
						"em,none": 0.2708,
						"em_stderr,none": 0.004443952167956555,
						"f1,none": 0.2809885716766119,
						"f1_stderr,none": 0.004454289869731537
					},
					"rte": {
						"acc,none": 0.6570397111913358,
						"acc_stderr,none": 0.02857348326765377,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.915,
						"acc_norm_stderr,none": 0.00882342636694233,
						"acc_stderr,none": 0.007274401481697051,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9105504587155964,
						"acc_stderr,none": 0.009670122820901166,
						"alias": " - sst2"
					},
					"triviaqa": {
						"alias": "triviaqa",
						"exact_match,remove_whitespace": 0.17275969683459652,
						"exact_match_stderr,remove_whitespace": 0.002822211327235643
					},
					"truthfulqa": {
						"acc,none": 0.3095892224575064,
						"acc_stderr,none": 0.0014655154419428882,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.687369852615783,
						"bleu_diff_stderr,none": 0.8684570990002328,
						"bleu_max,none": 26.846250747826357,
						"bleu_max_stderr,none": 0.8068451098676166,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.078552930908804,
						"rouge1_diff_stderr,none": 0.9205243229676898,
						"rouge1_max,none": 51.74706804161411,
						"rouge1_max_stderr,none": 0.8810481797202292,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062142,
						"rouge2_diff,none": -13.418690797496167,
						"rouge2_diff_stderr,none": 1.1153164657530568,
						"rouge2_max,none": 35.49351791798558,
						"rouge2_max_stderr,none": 1.032756634715349,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.544212877659202,
						"rougeL_diff_stderr,none": 0.9352180265777663,
						"rougeL_max,none": 48.98540483001104,
						"rougeL_max_stderr,none": 0.9008156163507788
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.687369852615783,
						"bleu_diff_stderr,none": 0.8684570990002328,
						"bleu_max,none": 26.846250747826357,
						"bleu_max_stderr,none": 0.8068451098676166,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -11.078552930908804,
						"rouge1_diff_stderr,none": 0.9205243229676898,
						"rouge1_max,none": 51.74706804161411,
						"rouge1_max_stderr,none": 0.8810481797202292,
						"rouge2_acc,none": 0.23990208078335373,
						"rouge2_acc_stderr,none": 0.014948812679062142,
						"rouge2_diff,none": -13.418690797496167,
						"rouge2_diff_stderr,none": 1.1153164657530568,
						"rouge2_max,none": 35.49351791798558,
						"rouge2_max_stderr,none": 1.032756634715349,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.544212877659202,
						"rougeL_diff_stderr,none": 0.9352180265777663,
						"rougeL_max,none": 48.98540483001104,
						"rougeL_max_stderr,none": 0.9008156163507788
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2386780905752754,
						"acc_stderr,none": 0.014922629695456416,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3805003543397375,
						"acc_stderr,none": 0.013887640144167292,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6354231617249144,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533933141943035,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.540665068527451,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6740331491712708,
						"acc_stderr,none": 0.01317378263692218,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5352112676056338,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.47115384615384615,
						"acc_stderr,none": 0.04918440626354964,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6176363636363637,
						"acc_stderr,none": 0.06535555046798423,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231326,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177528,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566072,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928862,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.02166071034720448,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724805,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209177,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43295850066934405,
						"acc_stderr,none": 0.051432181581821335,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337347,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46586345381526106,
						"acc_stderr,none": 0.00999868806610265,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975081,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3823293172690763,
						"acc_stderr,none": 0.009740580649033706,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5389558232931727,
						"acc_stderr,none": 0.009991608448389065,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4943775100401606,
						"acc_stderr,none": 0.010021439203777328,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5048192771084338,
						"acc_stderr,none": 0.010021607322475493,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.0098958129140522,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483482,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.00972676337283714,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384217,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44497991967871486,
						"acc_stderr,none": 0.009961210239024621,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.009783558109997087,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4004016064257028,
						"acc_stderr,none": 0.009821225609763081,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.351004016064257,
						"acc_stderr,none": 0.009566753834803286,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6267372600926537,
						"acc_stderr,none": 0.05526328579177065,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503923,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7809397749834547,
						"acc_stderr,none": 0.01064393129434969,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7114493712772998,
						"acc_stderr,none": 0.01165989229518815,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.557246856386499,
						"acc_stderr,none": 0.012782510750319232,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5962938451356717,
						"acc_stderr,none": 0.012626249735246585,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6611515552614163,
						"acc_stderr,none": 0.01218049075873904,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5261416280608868,
						"acc_stderr,none": 0.01284952688804421,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6810059563203177,
						"acc_stderr,none": 0.011994392833931961,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279766,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.012664648329214092,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139026,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8118678354686446,
						"acc_stderr,none": 0.03618077047283569,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8675268817204301,
						"acc_stderr,none": 0.007032136436579833,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.048740641331093675,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7403545359749739,
						"acc_stderr,none": 0.01416535575958735,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8022813688212928,
						"acc_stderr,none": 0.02460574422970024,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6793650793650794,
						"acc_stderr,none": 0.02633857021981404,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7916666666666666,
						"acc_stderr,none": 0.018107836663152056,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/1_0-c1-290_pth"
	},
	"./rwkv-x-dev/chunk7-2-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6124577226606539,
						"acc_norm,none": 0.5961104847801578,
						"acc_norm_stderr,none": 0.09007513859253347,
						"acc_stderr,none": 0.11164451197700213,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.014906472137358122,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8177910447761194,
						"acc_stderr,none": 0.16234176904345302,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29778967363149705,
						"acc_norm,none": 0.29778967363149705,
						"acc_norm_stderr,none": 0.0519706805019735,
						"acc_stderr,none": 0.0519706805019735,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5276983445105874,
						"acc_stderr,none": 0.10981766351899606,
						"alias": "glue",
						"f1,none": 0.650662845574523,
						"f1_stderr,none": 0.00031471083142472296,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.0001488869033596195
					},
					"lambada": {
						"acc,none": 0.7080341548612459,
						"acc_stderr,none": 0.017347853022796823,
						"alias": "lambada",
						"perplexity,none": 3.89284569335448,
						"perplexity_stderr,none": 0.2613057639913743
					},
					"lambada_multilingual": {
						"acc,none": 0.5339802057054143,
						"acc_stderr,none": 0.08566511376621706,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.362335170411367,
						"perplexity_stderr,none": 8.412713719891872
					},
					"mmlu": {
						"acc,none": 0.27930494231590935,
						"acc_stderr,none": 0.04308506877971217,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2688629117959617,
						"acc_stderr,none": 0.031111470297894468,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.30994528484068234,
						"acc_stderr,none": 0.04357145438862974,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2703932401689958,
						"acc_stderr,none": 0.04319623752839338,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2733904218204884,
						"acc_stderr,none": 0.05015860277888214,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4800714285714286,
						"acc_stderr,none": 0.049729329785782234,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7199236721416042,
						"acc_norm,none": 0.6008044980811904,
						"acc_norm_stderr,none": 0.010214006289457305,
						"acc_stderr,none": 0.1519940145363875,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357859240233928,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553783960426541,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3967170550249257,
						"perplexity_stderr,none": 0.06687718240486987,
						"word_perplexity,none": 10.55484762148609,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3078003067304741,
						"acc_stderr,none": 0.0014441094480576583,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.504686412590155,
						"bleu_diff_stderr,none": 0.8823104299235347,
						"bleu_max,none": 26.9642305836957,
						"bleu_max_stderr,none": 0.8065063308625604,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522505,
						"rouge1_diff,none": -10.751765115772496,
						"rouge1_diff_stderr,none": 0.9311089480396613,
						"rouge1_max,none": 52.11705772631609,
						"rouge1_max_stderr,none": 0.8796288648840946,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.801216957906565,
						"rouge2_diff_stderr,none": 1.1354169898696072,
						"rouge2_max,none": 36.05164300767999,
						"rouge2_max_stderr,none": 1.0323358913591065,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485081,
						"rougeL_diff,none": -11.19594557503081,
						"rougeL_diff_stderr,none": 0.9451608608828242,
						"rougeL_max,none": 49.22361767233658,
						"rougeL_max_stderr,none": 0.9033879505363906
					},
					"xcopa": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.07017392330703087,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4347791164658635,
						"acc_stderr,none": 0.054487023296732794,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6242705011732146,
						"acc_stderr,none": 0.05501521392939839,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.036270451183184706,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6124577226606539,
						"acc_norm,none": 0.5961104847801578,
						"acc_norm_stderr,none": 0.09007513859253347,
						"acc_stderr,none": 0.11164451197700213,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.014906472137358122,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.015139491543780532,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295756,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.37627986348122866,
						"acc_norm,none": 0.4061433447098976,
						"acc_norm_stderr,none": 0.014351656690097858,
						"acc_stderr,none": 0.014157022555407175,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7289562289562289,
						"acc_norm,none": 0.6898148148148148,
						"acc_norm_stderr,none": 0.009491721291998515,
						"acc_stderr,none": 0.0091209197417606,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8177910447761194,
						"acc_stderr,none": 0.16234176904345302,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478321,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298384,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653895,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577931,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.01574315237958553,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660007,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352802,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030093,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639228,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.00669595667816304,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033843,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866446,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487914,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178333,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.01013146813875699,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.01455320568795043,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490531,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.013644675781314121,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323506,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369688,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689081,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.278,
						"acc_stderr,none": 0.01417451646148525,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151101,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179479,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858915,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.01333479721693643,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474924,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244055,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491116,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316407,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397262,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.015689173023144064,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897066,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.015685057252717204,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336667,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024968,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613609,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286411,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557814,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651526,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386677,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246446,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381795,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256577,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490533,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.015740004693383845,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.00953361892934098,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118772,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504401,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499363,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499332,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160328,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706824,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333368,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178367,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.01545972195749338,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.309,
						"acc_stderr,none": 0.014619600977206484,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29778967363149705,
						"acc_norm,none": 0.29778967363149705,
						"acc_norm_stderr,none": 0.0519706805019735,
						"acc_stderr,none": 0.0519706805019735,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.038851434494290536,
						"acc_stderr,none": 0.038851434494290536,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3393939393939394,
						"acc_norm,none": 0.3393939393939394,
						"acc_norm_stderr,none": 0.03697442205031596,
						"acc_stderr,none": 0.03697442205031596,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3588516746411483,
						"acc_norm,none": 0.3588516746411483,
						"acc_norm_stderr,none": 0.033258685263024994,
						"acc_stderr,none": 0.033258685263024994,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467766,
						"acc_stderr,none": 0.03980066246467766,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.026038038744338663,
						"acc_stderr,none": 0.026038038744338663,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399813,
						"acc_stderr,none": 0.03166009679399813,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.18867924528301888,
						"acc_norm,none": 0.18867924528301888,
						"acc_norm_stderr,none": 0.0381824426969915,
						"acc_stderr,none": 0.0381824426969915,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.046992731189948504,
						"acc_stderr,none": 0.046992731189948504,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529027,
						"acc_stderr,none": 0.04651841326529027,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.18518518518518517,
						"acc_norm,none": 0.18518518518518517,
						"acc_norm_stderr,none": 0.037552658650371835,
						"acc_stderr,none": 0.037552658650371835,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055041,
						"acc_stderr,none": 0.04232473532055041,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.02706550456438953,
						"acc_stderr,none": 0.02706550456438953,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.032702871814820796,
						"acc_stderr,none": 0.032702871814820796,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675178,
						"acc_stderr,none": 0.03711513959675178,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.3237410071942446,
						"acc_norm,none": 0.3237410071942446,
						"acc_norm_stderr,none": 0.03983050752105461,
						"acc_stderr,none": 0.03983050752105461,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.29559748427672955,
						"acc_norm,none": 0.29559748427672955,
						"acc_norm_stderr,none": 0.036302143777231344,
						"acc_stderr,none": 0.036302143777231344,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.294478527607362,
						"acc_norm,none": 0.294478527607362,
						"acc_norm_stderr,none": 0.03581165790474082,
						"acc_stderr,none": 0.03581165790474082,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.313953488372093,
						"acc_norm,none": 0.313953488372093,
						"acc_norm_stderr,none": 0.035490439822271735,
						"acc_stderr,none": 0.035490439822271735,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.028968481368260038,
						"acc_stderr,none": 0.028968481368260038,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.031544498882702866,
						"acc_stderr,none": 0.031544498882702866,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.36554621848739494,
						"acc_norm,none": 0.36554621848739494,
						"acc_norm_stderr,none": 0.03128217706368461,
						"acc_stderr,none": 0.03128217706368461,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.039725528847851375,
						"acc_stderr,none": 0.039725528847851375,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34965034965034963,
						"acc_norm,none": 0.34965034965034963,
						"acc_norm_stderr,none": 0.040017160283823947,
						"acc_stderr,none": 0.040017160283823947,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.032976929254344596,
						"acc_stderr,none": 0.032976929254344596,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3422818791946309,
						"acc_norm,none": 0.3422818791946309,
						"acc_norm_stderr,none": 0.03900147211095722,
						"acc_stderr,none": 0.03900147211095722,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548533,
						"acc_stderr,none": 0.03433919627548533,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665338,
						"acc_stderr,none": 0.03334645408665338,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04040610178208841,
						"acc_stderr,none": 0.04040610178208841,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741765,
						"acc_stderr,none": 0.03351597731741765,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.035665455380848116,
						"acc_stderr,none": 0.035665455380848116,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.29927007299270075,
						"acc_norm,none": 0.29927007299270075,
						"acc_norm_stderr,none": 0.022615961145736822,
						"acc_stderr,none": 0.022615961145736822,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3691588785046729,
						"acc_norm,none": 0.3691588785046729,
						"acc_norm_stderr,none": 0.03306563404172723,
						"acc_stderr,none": 0.03306563404172723,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.0404901546062249,
						"acc_stderr,none": 0.0404901546062249,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.042941965824810475,
						"acc_stderr,none": 0.042941965824810475,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.031840067304739414,
						"acc_stderr,none": 0.031840067304739414,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03523442817211266,
						"acc_stderr,none": 0.03523442817211266,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.034380708208626445,
						"acc_stderr,none": 0.034380708208626445,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.04244626443180183,
						"acc_stderr,none": 0.04244626443180183,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3724137931034483,
						"acc_norm,none": 0.3724137931034483,
						"acc_norm_stderr,none": 0.0402873153294756,
						"acc_stderr,none": 0.0402873153294756,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.033370375852212746,
						"acc_stderr,none": 0.033370375852212746,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.03029364566174281,
						"acc_stderr,none": 0.03029364566174281,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33620689655172414,
						"acc_norm,none": 0.33620689655172414,
						"acc_norm_stderr,none": 0.031082338581586128,
						"acc_stderr,none": 0.031082338581586128,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03568272877241248,
						"acc_stderr,none": 0.03568272877241248,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493033,
						"acc_stderr,none": 0.030945344741493033,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268049,
						"acc_stderr,none": 0.03567969772268049,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.03369553691877717,
						"acc_stderr,none": 0.03369553691877717,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676528,
						"acc_stderr,none": 0.03597530251676528,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3167701863354037,
						"acc_norm,none": 0.3167701863354037,
						"acc_norm_stderr,none": 0.03677863131157453,
						"acc_stderr,none": 0.03677863131157453,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.035410885580708956,
						"acc_stderr,none": 0.035410885580708956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.01220192211742148
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5276983445105874,
						"acc_stderr,none": 0.10981766351899606,
						"alias": "glue",
						"f1,none": 0.650662845574523,
						"f1_stderr,none": 0.00031471083142472296,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.0001488869033596195
					},
					"hellaswag": {
						"acc,none": 0.5264887472615017,
						"acc_norm,none": 0.708922525393348,
						"acc_norm_stderr,none": 0.00453330775852133,
						"acc_stderr,none": 0.004982774293927772,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7080341548612459,
						"acc_stderr,none": 0.017347853022796823,
						"alias": "lambada",
						"perplexity,none": 3.89284569335448,
						"perplexity_stderr,none": 0.2613057639913743
					},
					"lambada_multilingual": {
						"acc,none": 0.5339802057054143,
						"acc_stderr,none": 0.08566511376621706,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.362335170411367,
						"perplexity_stderr,none": 8.412713719891872
					},
					"lambada_openai": {
						"acc,none": 0.7405394915583156,
						"acc_stderr,none": 0.006106914181105517,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3967170550249257,
						"perplexity_stderr,none": 0.06687718240486987
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41568018629924314,
						"acc_stderr,none": 0.006866209357631998,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.578759957204646,
						"perplexity_stderr,none": 1.9530692960374056
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.740151368135067,
						"acc_stderr,none": 0.006109878348081184,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3965794418270128,
						"perplexity_stderr,none": 0.06689643434002161
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4546865903357268,
						"acc_stderr,none": 0.006937312121911724,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.69818711724391,
						"perplexity_stderr,none": 1.401771833202636
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5501649524548806,
						"acc_stderr,none": 0.006930828902453044,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.79280894739065,
						"perplexity_stderr,none": 0.811166800248416
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5092179313021541,
						"acc_stderr,none": 0.006964793754756436,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.345340388390607,
						"perplexity_stderr,none": 1.1874998934545904
					},
					"lambada_standard": {
						"acc,none": 0.6757228798758005,
						"acc_stderr,none": 0.006521605716950192,
						"alias": " - lambada_standard",
						"perplexity,none": 4.388991033246837,
						"perplexity_stderr,none": 0.09484145716488686
					},
					"logiqa": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.271889400921659,
						"acc_norm_stderr,none": 0.017451716009436836,
						"acc_stderr,none": 0.016705867034419633,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.27930494231590935,
						"acc_stderr,none": 0.04308506877971217,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.0399926287661772,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.26973684210526316,
						"acc_stderr,none": 0.03611780560284898,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.33584905660377357,
						"acc_stderr,none": 0.02906722014664483,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.038760854559127644,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.0326926380614177,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.040233822736177476,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610337,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03724563619774632,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24338624338624337,
						"acc_stderr,none": 0.022101128787415412,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848877,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.36451612903225805,
						"acc_stderr,none": 0.02737987122994325,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.03108982600293752,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421255,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.03546563019624337,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2878787878787879,
						"acc_stderr,none": 0.03225883512300993,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3471502590673575,
						"acc_stderr,none": 0.034356961683613546,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2358974358974359,
						"acc_stderr,none": 0.021525965407408726,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.026335739404055803,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.028657491285071966,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27339449541284405,
						"acc_stderr,none": 0.0191092998460983,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18055555555555555,
						"acc_stderr,none": 0.026232878971491656,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2911392405063291,
						"acc_stderr,none": 0.02957160106575337,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.26717557251908397,
						"acc_stderr,none": 0.038808483010823944,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2688629117959617,
						"acc_stderr,none": 0.031111470297894468,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.04294340845212093,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.0452459600703005,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.035590395316173425,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2815533980582524,
						"acc_stderr,none": 0.04453254836326467,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.34615384615384615,
						"acc_stderr,none": 0.0311669573672359,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3550446998722861,
						"acc_stderr,none": 0.01711208577277298,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.28034682080924855,
						"acc_stderr,none": 0.024182427496577605,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23910614525139665,
						"acc_stderr,none": 0.014265554192331158,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.025553169991826514,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.30994528484068234,
						"acc_stderr,none": 0.04357145438862974,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2990353697749196,
						"acc_stderr,none": 0.02600330111788514,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2808641975308642,
						"acc_stderr,none": 0.025006469755799208,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.02624492034984301,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25749674054758803,
						"acc_stderr,none": 0.011167706014904135,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23897058823529413,
						"acc_stderr,none": 0.02590528064489301,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.018054027458815198,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.18775510204081633,
						"acc_stderr,none": 0.0250002560395462,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2703932401689958,
						"acc_stderr,none": 0.04319623752839338,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.30845771144278605,
						"acc_stderr,none": 0.03265819588512697,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2733904218204884,
						"acc_stderr,none": 0.05015860277888214,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.30120481927710846,
						"acc_stderr,none": 0.035716092300534796,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.03488647713457923,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3370351502801834,
						"acc_stderr,none": 0.004771553779358485,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3370626525630594,
						"acc_stderr,none": 0.004767522070011886,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7573529411764706,
						"acc_stderr,none": 0.021249047596394875,
						"alias": " - mrpc",
						"f1,none": 0.8421052631578947,
						"f1_stderr,none": 0.01568382487227569
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.04155124653739612,
						"exact_match_stderr,remove_whitespace": 0.003321873086134382
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.406,
						"acc_norm_stderr,none": 0.021983962090086337,
						"acc_stderr,none": 0.02059164957122493,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.011031720148042084,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.010883323176386987,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4155,
						"acc_stderr,none": 0.011022278362940804,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359801,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886316,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.011168006186472578,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.011173268141438302,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4800714285714286,
						"acc_stderr,none": 0.049729329785782234,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7568008705114254,
						"acc_norm,none": 0.766050054406964,
						"acc_norm_stderr,none": 0.009877236895137457,
						"acc_stderr,none": 0.010009611953858933,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7199236721416042,
						"acc_norm,none": 0.6008044980811904,
						"acc_norm_stderr,none": 0.010214006289457305,
						"acc_stderr,none": 0.1519940145363875,
						"alias": "pythia",
						"bits_per_byte,none": 0.6357859240233928,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553783960426541,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3967170550249257,
						"perplexity_stderr,none": 0.06687718240486987,
						"word_perplexity,none": 10.55484762148609,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4993593263774483,
						"acc_stderr,none": 0.006765404997877071,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.613603759584467,
						"acc_stderr,none": 0.00242166437179597,
						"alias": " - qqp",
						"f1,none": 0.6490384615384616,
						"f1_stderr,none": 0.002633082199465377
					},
					"record": {
						"alias": "record",
						"em,none": 0.2681,
						"em_stderr,none": 0.0044299211528132125,
						"f1,none": 0.2781285716742277,
						"f1_stderr,none": 0.004441448073483435
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.02909101849221745,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.915,
						"acc_norm_stderr,none": 0.008823426366942277,
						"acc_stderr,none": 0.007274401481697057,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.908256880733945,
						"acc_stderr,none": 0.009780972139670115,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3078003067304741,
						"acc_stderr,none": 0.0014441094480576583,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.504686412590155,
						"bleu_diff_stderr,none": 0.8823104299235347,
						"bleu_max,none": 26.9642305836957,
						"bleu_max_stderr,none": 0.8065063308625604,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522505,
						"rouge1_diff,none": -10.751765115772496,
						"rouge1_diff_stderr,none": 0.9311089480396613,
						"rouge1_max,none": 52.11705772631609,
						"rouge1_max_stderr,none": 0.8796288648840946,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.801216957906565,
						"rouge2_diff_stderr,none": 1.1354169898696072,
						"rouge2_max,none": 36.05164300767999,
						"rouge2_max_stderr,none": 1.0323358913591065,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485081,
						"rougeL_diff,none": -11.19594557503081,
						"rougeL_diff_stderr,none": 0.9451608608828242,
						"rougeL_max,none": 49.22361767233658,
						"rougeL_max_stderr,none": 0.9033879505363906
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386847,
						"bleu_diff,none": -8.504686412590155,
						"bleu_diff_stderr,none": 0.8823104299235347,
						"bleu_max,none": 26.9642305836957,
						"bleu_max_stderr,none": 0.8065063308625604,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522505,
						"rouge1_diff,none": -10.751765115772496,
						"rouge1_diff_stderr,none": 0.9311089480396613,
						"rouge1_max,none": 52.11705772631609,
						"rouge1_max_stderr,none": 0.8796288648840946,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.801216957906565,
						"rouge2_diff_stderr,none": 1.1354169898696072,
						"rouge2_max,none": 36.05164300767999,
						"rouge2_max_stderr,none": 1.0323358913591065,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485081,
						"rougeL_diff,none": -11.19594557503081,
						"rougeL_diff_stderr,none": 0.9451608608828242,
						"rougeL_max,none": 49.22361767233658,
						"rougeL_max_stderr,none": 0.9033879505363906
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23745410036719705,
						"acc_stderr,none": 0.014896277441041857,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3781465130937512,
						"acc_stderr,none": 0.013811696424932959,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6357859240233928,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.553783960426541,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.55484762148609,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.01314888332092315,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5774647887323944,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5384615384615384,
						"acc_stderr,none": 0.04912048887947827,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.07017392330703087,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.02194892960993861,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.020011219298073535,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587564,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962132,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127672,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317695,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.020553269174209188,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936603,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4347791164658635,
						"acc_stderr,none": 0.054487023296732794,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47269076305220886,
						"acc_stderr,none": 0.010007112889731978,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.010021345444047586,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3887550200803213,
						"acc_stderr,none": 0.009770869423441483,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5393574297188755,
						"acc_stderr,none": 0.009990976095711881,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4991967871485944,
						"acc_stderr,none": 0.010022059935722399,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5076305220883535,
						"acc_stderr,none": 0.010020905731542325,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42530120481927713,
						"acc_stderr,none": 0.00990959719222113,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4867469879518072,
						"acc_stderr,none": 0.010018551648218457,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3751004016064257,
						"acc_stderr,none": 0.009704349720814059,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4036144578313253,
						"acc_stderr,none": 0.009834096424955396,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44899598393574297,
						"acc_stderr,none": 0.00996979347724083,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40160642570281124,
						"acc_stderr,none": 0.009826103601507132,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673286,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3550200803212851,
						"acc_stderr,none": 0.00959151273097429,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6242705011732146,
						"acc_stderr,none": 0.05501521392939839,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5916611515552614,
						"acc_stderr,none": 0.012649064392162167,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7776307081403044,
						"acc_stderr,none": 0.010701277694882511,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7074784910655195,
						"acc_stderr,none": 0.01170703857297503,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207792,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5976174718729318,
						"acc_stderr,none": 0.01261951681952872,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6565188616810059,
						"acc_stderr,none": 0.012220432513619237,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5254798146922568,
						"acc_stderr,none": 0.012850407240776846,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6803441429516877,
						"acc_stderr,none": 0.012000993063297277,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207789,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5823957643944407,
						"acc_stderr,none": 0.01269121138284864,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6373262739907346,
						"acc_stderr,none": 0.012372301216772914,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.036270451183184706,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8658064516129033,
						"acc_stderr,none": 0.007070630875697702,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7455683003128258,
						"acc_stderr,none": 0.014071700152397672,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944937,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.02660289614892078,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7896825396825397,
						"acc_stderr,none": 0.018171046497690278,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk7-2-0_85_pth"
	},
	"./rwkv-x-dev/chunk8-1-0_85_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6104847801578354,
						"acc_norm,none": 0.5947012401352875,
						"acc_norm_stderr,none": 0.08981642534388244,
						"acc_stderr,none": 0.11191934569215296,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35375,
						"acc_stderr,none": 0.014710780611883979,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8174328358208955,
						"acc_stderr,none": 0.16999871422692428,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29148678984631327,
						"acc_norm,none": 0.29148678984631327,
						"acc_norm_stderr,none": 0.05263848216553079,
						"acc_stderr,none": 0.05263848216553079,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5111660314435446,
						"acc_stderr,none": 0.009135782070714867,
						"alias": "glue",
						"f1,none": 0.6354276316936914,
						"f1_stderr,none": 0.00038511735871933785,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.016942016317419987,
						"alias": "lambada",
						"perplexity,none": 3.9014217849616575,
						"perplexity_stderr,none": 0.263639202582672
					},
					"lambada_multilingual": {
						"acc,none": 0.5347176402095867,
						"acc_stderr,none": 0.08168066602823938,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.22725001011707,
						"perplexity_stderr,none": 8.195892590745013
					},
					"mmlu": {
						"acc,none": 0.276527560176613,
						"acc_stderr,none": 0.04250274917115265,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2680127523910733,
						"acc_stderr,none": 0.029872153837294264,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3041519150305761,
						"acc_stderr,none": 0.04479280256283234,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26389340266493333,
						"acc_stderr,none": 0.04180401299233723,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.27434189660640657,
						"acc_stderr,none": 0.05012332233404547,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4769285714285714,
						"acc_stderr,none": 0.055501427997569915,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7189723873685494,
						"acc_norm,none": 0.5994725263669435,
						"acc_norm_stderr,none": 0.010146701420303482,
						"acc_stderr,none": 0.15809216308012172,
						"alias": "pythia",
						"bits_per_byte,none": 0.6354002445381076,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533686387632453,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4003205873121494,
						"perplexity_stderr,none": 0.06698424189454974,
						"word_perplexity,none": 10.539769738564361,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3073536942243492,
						"acc_stderr,none": 0.0015142912612350664,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.551701185936416,
						"bleu_diff_stderr,none": 0.868991143674006,
						"bleu_max,none": 26.8592713922627,
						"bleu_max_stderr,none": 0.8049223875861826,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476178,
						"rouge1_diff,none": -11.189399175085565,
						"rouge1_diff_stderr,none": 0.9155690320731978,
						"rouge1_max,none": 51.91119837024075,
						"rouge1_max_stderr,none": 0.8771583988300754,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.0148697550158711,
						"rouge2_diff,none": -13.383315019102694,
						"rouge2_diff_stderr,none": 1.1145542685235739,
						"rouge2_max,none": 35.6603113153865,
						"rouge2_max_stderr,none": 1.030444424949211,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.432064081825608,
						"rougeL_diff_stderr,none": 0.9281125527246897,
						"rougeL_max,none": 49.02693749331348,
						"rougeL_max_stderr,none": 0.8984266681686445
					},
					"xcopa": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.07109482192806219,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4383935742971888,
						"acc_stderr,none": 0.053353870542395446,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6241501714698273,
						"acc_stderr,none": 0.06263083364535375,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8134412227466846,
						"acc_stderr,none": 0.04132648453380926,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6104847801578354,
						"acc_norm,none": 0.5947012401352875,
						"acc_norm_stderr,none": 0.08981642534388244,
						"acc_stderr,none": 0.11191934569215296,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35375,
						"acc_stderr,none": 0.014710780611883979,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.356,
						"acc_stderr,none": 0.015149042659306626,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.01513949154378053,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35083333333333333,
						"acc_stderr,none": 0.013782212417178193,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.37372013651877134,
						"acc_norm,none": 0.4052901023890785,
						"acc_norm_stderr,none": 0.014346869060229323,
						"acc_stderr,none": 0.014137708601759079,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7272727272727273,
						"acc_norm,none": 0.6881313131313131,
						"acc_norm_stderr,none": 0.009505823345817654,
						"acc_stderr,none": 0.00913863072636423,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8174328358208955,
						"acc_stderr,none": 0.16999871422692428,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787728,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036407,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.00263779414624376,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454285,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024963,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.01339490288966001,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.015749255189977586,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.01333479721693644,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528035,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036437,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474928,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380705,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151089,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704171,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178349,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342758,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091513,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.013644675781314125,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919307,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249936,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611454,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.276,
						"acc_stderr,none": 0.014142984975740671,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787738,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264357,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920842,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855755,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704166,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797575,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.454,
						"acc_stderr,none": 0.015752210388771833,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.01532510550889813,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.555,
						"acc_stderr,none": 0.015723301886760934,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771298,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340968,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.00973955126578513,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425668,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474918,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346938,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264374,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.489,
						"acc_stderr,none": 0.01581547119529269,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.0071508835212954315,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747394,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403644,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.01414298497574067,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.01574000469338385,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695798,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061275,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.01545972195749338,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.01091215263250441,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528022,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319325,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504408,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557424,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306471,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306532,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911077,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.304,
						"acc_stderr,none": 0.014553205687950436,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29148678984631327,
						"acc_norm,none": 0.29148678984631327,
						"acc_norm_stderr,none": 0.05263848216553079,
						"acc_stderr,none": 0.05263848216553079,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268947,
						"acc_stderr,none": 0.03279317792268947,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.03851802138867094,
						"acc_stderr,none": 0.03851802138867094,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3492822966507177,
						"acc_norm,none": 0.3492822966507177,
						"acc_norm_stderr,none": 0.03305620024300092,
						"acc_stderr,none": 0.03305620024300092,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467765,
						"acc_stderr,none": 0.03980066246467765,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.34558823529411764,
						"acc_norm,none": 0.34558823529411764,
						"acc_norm_stderr,none": 0.04092966025145302,
						"acc_stderr,none": 0.04092966025145302,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.30030959752321984,
						"acc_norm,none": 0.30030959752321984,
						"acc_norm_stderr,none": 0.02554521889840194,
						"acc_stderr,none": 0.02554521889840194,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.031493281045079556,
						"acc_stderr,none": 0.031493281045079556,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2681564245810056,
						"acc_norm,none": 0.2681564245810056,
						"acc_norm_stderr,none": 0.033204216306737123,
						"acc_stderr,none": 0.033204216306737123,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460288,
						"acc_stderr,none": 0.028458820991460288,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1509433962264151,
						"acc_norm,none": 0.1509433962264151,
						"acc_norm_stderr,none": 0.03493660753858682,
						"acc_stderr,none": 0.03493660753858682,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.36792452830188677,
						"acc_norm,none": 0.36792452830188677,
						"acc_norm_stderr,none": 0.04706187110761455,
						"acc_stderr,none": 0.04706187110761455,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.038260763248848646,
						"acc_stderr,none": 0.038260763248848646,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.027176455318754136,
						"acc_stderr,none": 0.027176455318754136,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.03354092437591518,
						"acc_stderr,none": 0.03354092437591518,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.03959440284735793,
						"acc_stderr,none": 0.03959440284735793,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.31446540880503143,
						"acc_norm,none": 0.31446540880503143,
						"acc_norm_stderr,none": 0.036937932500422856,
						"acc_stderr,none": 0.036937932500422856,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3006134969325153,
						"acc_norm,none": 0.3006134969325153,
						"acc_norm_stderr,none": 0.03602511318806771,
						"acc_stderr,none": 0.03602511318806771,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.0351209126342837,
						"acc_stderr,none": 0.0351209126342837,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.030313710538198892,
						"acc_stderr,none": 0.030313710538198892,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.36554621848739494,
						"acc_norm,none": 0.36554621848739494,
						"acc_norm_stderr,none": 0.0312821770636846,
						"acc_stderr,none": 0.0312821770636846,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.25217391304347825,
						"acc_norm,none": 0.25217391304347825,
						"acc_norm_stderr,none": 0.02869674529449334,
						"acc_stderr,none": 0.02869674529449334,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.039827381778096436,
						"acc_stderr,none": 0.039827381778096436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627455,
						"acc_stderr,none": 0.03366618544627455,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3422818791946309,
						"acc_norm,none": 0.3422818791946309,
						"acc_norm_stderr,none": 0.03900147211095722,
						"acc_stderr,none": 0.03900147211095722,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331529,
						"acc_stderr,none": 0.03360300796331529,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552483,
						"acc_stderr,none": 0.03703667194552483,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.041100705493392085,
						"acc_stderr,none": 0.041100705493392085,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604673,
						"acc_stderr,none": 0.03893259610604673,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3313953488372093,
						"acc_norm,none": 0.3313953488372093,
						"acc_norm_stderr,none": 0.035996464381795934,
						"acc_stderr,none": 0.035996464381795934,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2944038929440389,
						"acc_norm,none": 0.2944038929440389,
						"acc_norm_stderr,none": 0.022509089804193683,
						"acc_stderr,none": 0.022509089804193683,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3598130841121495,
						"acc_norm,none": 0.3598130841121495,
						"acc_norm_stderr,none": 0.032885319913188285,
						"acc_stderr,none": 0.032885319913188285,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3170731707317073,
						"acc_norm,none": 0.3170731707317073,
						"acc_norm_stderr,none": 0.04212955964853051,
						"acc_stderr,none": 0.04212955964853051,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03169833889962088,
						"acc_stderr,none": 0.03169833889962088,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392583,
						"acc_stderr,none": 0.03443002441392583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2804232804232804,
						"acc_norm,none": 0.2804232804232804,
						"acc_norm_stderr,none": 0.03276171742795849,
						"acc_stderr,none": 0.03276171742795849,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363241,
						"acc_stderr,none": 0.041265147363241,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.04513676718168311,
						"acc_stderr,none": 0.04513676718168311,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752743,
						"acc_stderr,none": 0.03424737867752743,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.03046967065084669,
						"acc_stderr,none": 0.03046967065084669,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.02228082221281224,
						"acc_stderr,none": 0.02228082221281224,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.03087984562096084,
						"acc_stderr,none": 0.03087984562096084,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3735632183908046,
						"acc_norm,none": 0.3735632183908046,
						"acc_norm_stderr,none": 0.0367788061186906,
						"acc_stderr,none": 0.0367788061186906,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2918918918918919,
						"acc_norm,none": 0.2918918918918919,
						"acc_norm_stderr,none": 0.03351597731741764,
						"acc_stderr,none": 0.03351597731741764,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5111660314435446,
						"acc_stderr,none": 0.009135782070714867,
						"alias": "glue",
						"f1,none": 0.6354276316936914,
						"f1_stderr,none": 0.00038511735871933785,
						"mcc,none": -0.03589254563226399,
						"mcc_stderr,none": 0.012097905313463839
					},
					"hellaswag": {
						"acc,none": 0.5257916749651463,
						"acc_norm,none": 0.7103166699860586,
						"acc_norm_stderr,none": 0.004526883021027611,
						"acc_stderr,none": 0.00498313847960438,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.016942016317419987,
						"alias": "lambada",
						"perplexity,none": 3.9014217849616575,
						"perplexity_stderr,none": 0.263639202582672
					},
					"lambada_multilingual": {
						"acc,none": 0.5347176402095867,
						"acc_stderr,none": 0.08168066602823938,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.22725001011707,
						"perplexity_stderr,none": 8.195892590745013
					},
					"lambada_openai": {
						"acc,none": 0.7384048127304483,
						"acc_stderr,none": 0.006123140249839602,
						"alias": " - lambada_openai",
						"perplexity,none": 3.4003205873121494,
						"perplexity_stderr,none": 0.06698424189454974
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4185911119736076,
						"acc_stderr,none": 0.006873024743775555,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.199129576047866,
						"perplexity_stderr,none": 1.9349801328791227
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.739569183000194,
						"acc_stderr,none": 0.00611431293778828,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4004233414601885,
						"perplexity_stderr,none": 0.06697370244310656
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4521637880846109,
						"acc_stderr,none": 0.006934023831544426,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.850981316009804,
						"perplexity_stderr,none": 1.4070906420015354
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5511352610130021,
						"acc_stderr,none": 0.0069294524147908276,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.529112781830488,
						"perplexity_stderr,none": 0.7966420480129628
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5121288569765186,
						"acc_stderr,none": 0.006963927837195674,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.15660303523699,
						"perplexity_stderr,none": 1.1689469598513689
					},
					"lambada_standard": {
						"acc,none": 0.676693188433922,
						"acc_stderr,none": 0.006516515049707147,
						"alias": " - lambada_standard",
						"perplexity,none": 4.4022486508116705,
						"perplexity_stderr,none": 0.09540545251248941
					},
					"logiqa": {
						"acc,none": 0.22887864823348694,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825177,
						"acc_stderr,none": 0.01647810727631329,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.276527560176613,
						"acc_stderr,none": 0.04250274917115265,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.31851851851851853,
						"acc_stderr,none": 0.040247784019771096,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03583496176361061,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3132075471698113,
						"acc_stderr,none": 0.02854479331905533,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.03214737302029471,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.04023382273617746,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.31063829787234043,
						"acc_stderr,none": 0.03025123757921317,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.04096985139843671,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003336,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643898,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.041349130183033156,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.35161290322580646,
						"acc_stderr,none": 0.027162537826948458,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114485,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384739,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.03453131801885415,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03191178226713547,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3471502590673575,
						"acc_stderr,none": 0.03435696168361355,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.021444547301560497,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.02671924078371217,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.24789915966386555,
						"acc_stderr,none": 0.028047967224176896,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26605504587155965,
						"acc_stderr,none": 0.018946022322225604,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.026491914727355164,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.030778554678693247,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.28270042194092826,
						"acc_stderr,none": 0.02931281415395593,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.26717557251908397,
						"acc_stderr,none": 0.03880848301082396,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2680127523910733,
						"acc_stderr,none": 0.029872153837294264,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.32231404958677684,
						"acc_stderr,none": 0.04266416363352168,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.044531975073749834,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.035590395316173425,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.30097087378640774,
						"acc_stderr,none": 0.045416094465039476,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.31196581196581197,
						"acc_stderr,none": 0.030351527323344948,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3499361430395913,
						"acc_stderr,none": 0.017055679797150437,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.024105712607754307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574894,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.02564686309713791,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3041519150305761,
						"acc_stderr,none": 0.04479280256283234,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2958199356913183,
						"acc_stderr,none": 0.025922371788818777,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.29012345679012347,
						"acc_stderr,none": 0.02525117393649501,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2561929595827901,
						"acc_stderr,none": 0.011149173153110582,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.025767252010855966,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2696078431372549,
						"acc_stderr,none": 0.017952449196987866,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.04653429807913508,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.18775510204081633,
						"acc_stderr,none": 0.025000256039546212,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26389340266493333,
						"acc_stderr,none": 0.04180401299233723,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2736318407960199,
						"acc_stderr,none": 0.03152439186555401,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.27434189660640657,
						"acc_stderr,none": 0.05012332233404547,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3072289156626506,
						"acc_stderr,none": 0.035915667978246635,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.03508771929824563,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3413143148242486,
						"acc_stderr,none": 0.004786227495822672,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.342046379170057,
						"acc_stderr,none": 0.004784552135582234,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7647058823529411,
						"acc_stderr,none": 0.02102594605453768,
						"alias": " - mrpc",
						"f1,none": 0.8456591639871383,
						"f1_stderr,none": 0.015590523708891517
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.03545706371191136,
						"exact_match_stderr,remove_whitespace": 0.0030783551664118067
					},
					"openbookqa": {
						"acc,none": 0.308,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.022033677993740865,
						"acc_stderr,none": 0.0206670329874661,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.424,
						"acc_stderr,none": 0.011053193499766087,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.371,
						"acc_stderr,none": 0.010804530019138506,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654283,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.011133619300989868,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.011168006186472582,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4769285714285714,
						"acc_stderr,none": 0.055501427997569915,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7578890097932536,
						"acc_norm,none": 0.7671381936887922,
						"acc_norm_stderr,none": 0.009861236071080755,
						"acc_stderr,none": 0.009994371269104367,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7189723873685494,
						"acc_norm,none": 0.5994725263669435,
						"acc_norm_stderr,none": 0.010146701420303482,
						"acc_stderr,none": 0.15809216308012172,
						"alias": "pythia",
						"bits_per_byte,none": 0.6354002445381076,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533686387632453,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.4003205873121494,
						"perplexity_stderr,none": 0.06698424189454974,
						"word_perplexity,none": 10.539769738564361,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5030203185063152,
						"acc_stderr,none": 0.0067652871181183415,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5827850605985654,
						"acc_stderr,none": 0.0024523792397046188,
						"alias": " - qqp",
						"f1,none": 0.6336070202875885,
						"f1_stderr,none": 0.0026231496776330868
					},
					"record": {
						"alias": "record",
						"em,none": 0.2686,
						"em_stderr,none": 0.004432535249365748,
						"f1,none": 0.2791985716938972,
						"f1_stderr,none": 0.004445281812626251
					},
					"rte": {
						"acc,none": 0.6245487364620939,
						"acc_stderr,none": 0.029147775180820408,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.943,
						"acc_norm,none": 0.912,
						"acc_norm_stderr,none": 0.008963053962592072,
						"acc_stderr,none": 0.007335175853706829,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9048165137614679,
						"acc_stderr,none": 0.009943790947096227,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3073536942243492,
						"acc_stderr,none": 0.0015142912612350664,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.551701185936416,
						"bleu_diff_stderr,none": 0.868991143674006,
						"bleu_max,none": 26.8592713922627,
						"bleu_max_stderr,none": 0.8049223875861826,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476178,
						"rouge1_diff,none": -11.189399175085565,
						"rouge1_diff_stderr,none": 0.9155690320731978,
						"rouge1_max,none": 51.91119837024075,
						"rouge1_max_stderr,none": 0.8771583988300754,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.0148697550158711,
						"rouge2_diff,none": -13.383315019102694,
						"rouge2_diff_stderr,none": 1.1145542685235739,
						"rouge2_max,none": 35.6603113153865,
						"rouge2_max_stderr,none": 1.030444424949211,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.432064081825608,
						"rougeL_diff_stderr,none": 0.9281125527246897,
						"rougeL_max,none": 49.02693749331348,
						"rougeL_max_stderr,none": 0.8984266681686445
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.551701185936416,
						"bleu_diff_stderr,none": 0.868991143674006,
						"bleu_max,none": 26.8592713922627,
						"bleu_max_stderr,none": 0.8049223875861826,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.015321821688476178,
						"rouge1_diff,none": -11.189399175085565,
						"rouge1_diff_stderr,none": 0.9155690320731978,
						"rouge1_max,none": 51.91119837024075,
						"rouge1_max_stderr,none": 0.8771583988300754,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.0148697550158711,
						"rouge2_diff,none": -13.383315019102694,
						"rouge2_diff_stderr,none": 1.1145542685235739,
						"rouge2_max,none": 35.6603113153865,
						"rouge2_max_stderr,none": 1.030444424949211,
						"rougeL_acc,none": 0.2582619339045288,
						"rougeL_acc_stderr,none": 0.01532182168847618,
						"rougeL_diff,none": -11.432064081825608,
						"rougeL_diff_stderr,none": 0.9281125527246897,
						"rougeL_max,none": 49.02693749331348,
						"rougeL_max_stderr,none": 0.8984266681686445
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2350061199510404,
						"acc_stderr,none": 0.014843061507731604,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3797012684976581,
						"acc_stderr,none": 0.013777838440612167,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6354002445381076,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5533686387632453,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.539769738564361,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6740331491712708,
						"acc_stderr,none": 0.013173782636922192,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899505,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.49038461538461536,
						"acc_stderr,none": 0.04925735314273532,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.07109482192806219,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.02234794983266809,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070907,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.01963596552972551,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317695,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896142,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177514,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4383935742971888,
						"acc_stderr,none": 0.053353870542395446,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975081,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39558232931726905,
						"acc_stderr,none": 0.009801094347134982,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5321285140562249,
						"acc_stderr,none": 0.01000136106817309,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5004016064257029,
						"acc_stderr,none": 0.010022069634353861,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5164658634538153,
						"acc_stderr,none": 0.010016636930829968,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505942,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.48714859437751,
						"acc_stderr,none": 0.010018761856718258,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38072289156626504,
						"acc_stderr,none": 0.009732727412507497,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384222,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4534136546184739,
						"acc_stderr,none": 0.009978476483838964,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40763052208835343,
						"acc_stderr,none": 0.00984956920273372,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750339,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3514056224899598,
						"acc_stderr,none": 0.009569263079823963,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6241501714698273,
						"acc_stderr,none": 0.06263083364535375,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551169,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959689,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7081403044341495,
						"acc_stderr,none": 0.01169925603764938,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279766,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5909993381866314,
						"acc_stderr,none": 0.012652228567132374,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6558570483123759,
						"acc_stderr,none": 0.012226032926509721,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.528788881535407,
						"acc_stderr,none": 0.012845779070719507,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6757114493712773,
						"acc_stderr,none": 0.012046419229995326,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293373,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5837193911317009,
						"acc_stderr,none": 0.012685473350967525,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6360026472534746,
						"acc_stderr,none": 0.012381980068788365,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8134412227466846,
						"acc_stderr,none": 0.04132648453380926,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8653763440860215,
						"acc_stderr,none": 0.007080193677104268,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.0500664280504192,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7549530761209593,
						"acc_stderr,none": 0.013896385472596353,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7870722433460076,
						"acc_stderr,none": 0.02529139544566284,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.025899880794833654,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187162,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/chunk8-1-0_85_pth"
	},
	"./rwkv-x-dev/r3-4k-test2-fix3-blink-final_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6319052987598647,
						"acc_norm,none": 0.633314543404735,
						"acc_norm_stderr,none": 0.09626976009788216,
						"acc_stderr,none": 0.10676966819302612,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.021781719122277002,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8383582089552238,
						"acc_stderr,none": 0.15204432697356238,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3078052149887757,
						"acc_norm,none": 0.3078052149887757,
						"acc_norm_stderr,none": 0.06037078590057642,
						"acc_stderr,none": 0.06037078590057642,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5370116722248689,
						"acc_stderr,none": 0.010460701257337201,
						"alias": "glue",
						"f1,none": 0.657862004311887,
						"f1_stderr,none": 0.00027317800929356907,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7160877158936542,
						"acc_stderr,none": 0.017102959875278094,
						"alias": "lambada",
						"perplexity,none": 3.776252908164677,
						"perplexity_stderr,none": 0.2340034067979183
					},
					"lambada_multilingual": {
						"acc,none": 0.5418591111973607,
						"acc_stderr,none": 0.08444102747672037,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.866530688155684,
						"perplexity_stderr,none": 8.177187436835187
					},
					"mmlu": {
						"acc,none": 0.3171912832929782,
						"acc_stderr,none": 0.05887720124163766,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.30882040382571735,
						"acc_stderr,none": 0.0487832658681239,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749598,
						"acc_stderr,none": 0.04906296067271302,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3324666883327917,
						"acc_stderr,none": 0.057410283714716,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2711703139866793,
						"acc_stderr,none": 0.06216713300308039,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4622857142857143,
						"acc_stderr,none": 0.04634266452288703,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7416469093808626,
						"acc_norm,none": 0.6369494254100625,
						"acc_norm_stderr,none": 0.011234607194570884,
						"acc_stderr,none": 0.14475367447134882,
						"alias": "pythia",
						"bits_per_byte,none": 0.636316373054604,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543553595877877,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.336313981376705,
						"perplexity_stderr,none": 0.0656959414194124,
						"word_perplexity,none": 10.575620414187059,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3254234467577032,
						"acc_stderr,none": 0.0015130352812913553,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.33047735618115054,
						"bleu_acc_stderr,none": 0.01646676961369831,
						"bleu_diff,none": -5.7036490429140345,
						"bleu_diff_stderr,none": 0.9010152814723279,
						"bleu_max,none": 28.35416588379638,
						"bleu_max_stderr,none": 0.827112611733745,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -8.00630973621455,
						"rouge1_diff_stderr,none": 1.0091288952787751,
						"rouge1_max,none": 53.506433393008265,
						"rouge1_max_stderr,none": 0.8814295056590551,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.01568092936402465,
						"rouge2_diff,none": -9.824237550112944,
						"rouge2_diff_stderr,none": 1.2058782446955432,
						"rouge2_max,none": 37.69812184422661,
						"rouge2_max_stderr,none": 1.0545210926191257,
						"rougeL_acc,none": 0.2962056303549572,
						"rougeL_acc_stderr,none": 0.015983595101811392,
						"rougeL_diff,none": -8.222814174837577,
						"rougeL_diff_stderr,none": 1.0203237631889481,
						"rougeL_max,none": 50.771430702549445,
						"rougeL_max_stderr,none": 0.9037994804473503
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.07112416203762281,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43651941097724234,
						"acc_stderr,none": 0.04855067774788887,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6323325913001624,
						"acc_stderr,none": 0.05279994042993179,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03634271832792071,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6319052987598647,
						"acc_norm,none": 0.633314543404735,
						"acc_norm_stderr,none": 0.09626976009788216,
						"acc_stderr,none": 0.10676966819302612,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.021781719122277002,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.015459721957493382,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.341,
						"acc_stderr,none": 0.01499813134840269,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311012,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4061433447098976,
						"acc_norm,none": 0.4300341296928328,
						"acc_norm_stderr,none": 0.014467631559137996,
						"acc_stderr,none": 0.014351656690097862,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7432659932659933,
						"acc_norm,none": 0.7335858585858586,
						"acc_norm_stderr,none": 0.009071357971078683,
						"acc_stderr,none": 0.008963590834042409,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8383582089552238,
						"acc_stderr,none": 0.15204432697356238,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651523,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565647,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454264,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524308,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673723,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498325,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763935,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235237,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448817,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403632,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061015,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033842,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337088,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.00858333697775365,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177546,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910613,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248791,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.01265543994336666,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247123,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.01310617304066176,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031516,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.01135891830347529,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689097,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.015605111967541946,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108652,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366672,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348633,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559571,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832349,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474922,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783226,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493384,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108665,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.555,
						"acc_stderr,none": 0.01572330188676094,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771295,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296188,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704164,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108668,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037191,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340995,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412803,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118777,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578247,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240651,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816324,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996033,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275285,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565578,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774166,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.462,
						"acc_stderr,none": 0.01577354762901511,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397342,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.015275252316519359,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.01081165537241605,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.00973955126578514,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.01236158601510377,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.01087884871433332,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.00791034598317755,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689101,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410041,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.387,
						"acc_stderr,none": 0.015410011955493937,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.014976758771620345,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3078052149887757,
						"acc_norm,none": 0.3078052149887757,
						"acc_norm_stderr,none": 0.06037078590057642,
						"acc_stderr,none": 0.06037078590057642,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.35502958579881655,
						"acc_norm,none": 0.35502958579881655,
						"acc_norm_stderr,none": 0.036918795945769134,
						"acc_stderr,none": 0.036918795945769134,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.038610038610038595,
						"acc_stderr,none": 0.038610038610038595,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.032738974545663414,
						"acc_stderr,none": 0.032738974545663414,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.03904067786683381,
						"acc_stderr,none": 0.03904067786683381,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.03335451753206105,
						"acc_stderr,none": 0.03335451753206105,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245475,
						"acc_stderr,none": 0.03244189290245475,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.0418644516301375,
						"acc_stderr,none": 0.0418644516301375,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.040263772107873096,
						"acc_stderr,none": 0.040263772107873096,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.02603803874433866,
						"acc_stderr,none": 0.02603803874433866,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.03228210387037894,
						"acc_stderr,none": 0.03228210387037894,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.31843575418994413,
						"acc_norm,none": 0.31843575418994413,
						"acc_norm_stderr,none": 0.03491839802265681,
						"acc_stderr,none": 0.03491839802265681,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.270042194092827,
						"acc_norm,none": 0.270042194092827,
						"acc_norm_stderr,none": 0.028900721906293426,
						"acc_stderr,none": 0.028900721906293426,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.03743138631255277,
						"acc_stderr,none": 0.03743138631255277,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189597,
						"acc_stderr,none": 0.04794743635189597,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4056603773584906,
						"acc_norm,none": 0.4056603773584906,
						"acc_norm_stderr,none": 0.04791858528000114,
						"acc_stderr,none": 0.04791858528000114,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252628,
						"acc_stderr,none": 0.04077494709252628,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.042324735320550415,
						"acc_stderr,none": 0.042324735320550415,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.026598537627601462,
						"acc_stderr,none": 0.026598537627601462,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.03332139944668086,
						"acc_stderr,none": 0.03332139944668086,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.035650796707083106,
						"acc_stderr,none": 0.035650796707083106,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2925170068027211,
						"acc_norm,none": 0.2925170068027211,
						"acc_norm_stderr,none": 0.037649319840851715,
						"acc_stderr,none": 0.037649319840851715,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.039594402847357935,
						"acc_stderr,none": 0.039594402847357935,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.036803503712864616,
						"acc_stderr,none": 0.036803503712864616,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.0351209126342837,
						"acc_stderr,none": 0.0351209126342837,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40336134453781514,
						"acc_norm,none": 0.40336134453781514,
						"acc_norm_stderr,none": 0.031866081214088314,
						"acc_stderr,none": 0.031866081214088314,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.32592592592592595,
						"acc_norm,none": 0.32592592592592595,
						"acc_norm_stderr,none": 0.04049122041702506,
						"acc_stderr,none": 0.04049122041702506,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3356643356643357,
						"acc_norm,none": 0.3356643356643357,
						"acc_norm_stderr,none": 0.03962800523347343,
						"acc_stderr,none": 0.03962800523347343,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202436,
						"acc_stderr,none": 0.03467837977202436,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662573,
						"acc_stderr,none": 0.04289122333662573,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30158730158730157,
						"acc_norm,none": 0.30158730158730157,
						"acc_norm_stderr,none": 0.04104947269903394,
						"acc_stderr,none": 0.04104947269903394,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.03369553691877718,
						"acc_stderr,none": 0.03369553691877718,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37209302325581395,
						"acc_norm,none": 0.37209302325581395,
						"acc_norm_stderr,none": 0.03696369368553606,
						"acc_stderr,none": 0.03696369368553606,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2846715328467153,
						"acc_norm,none": 0.2846715328467153,
						"acc_norm_stderr,none": 0.02228603692971729,
						"acc_stderr,none": 0.02228603692971729,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.033824276998905854,
						"acc_stderr,none": 0.033824276998905854,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630124,
						"acc_stderr,none": 0.03260773253630124,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.035650381774539115,
						"acc_stderr,none": 0.035650381774539115,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.0346439012574329,
						"acc_stderr,none": 0.0346439012574329,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.33793103448275863,
						"acc_norm,none": 0.33793103448275863,
						"acc_norm_stderr,none": 0.039417076320648906,
						"acc_stderr,none": 0.039417076320648906,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.4095238095238095,
						"acc_norm,none": 0.4095238095238095,
						"acc_norm_stderr,none": 0.04821965555951261,
						"acc_stderr,none": 0.04821965555951261,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780337,
						"acc_stderr,none": 0.030808291124780337,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.40948275862068967,
						"acc_norm,none": 0.40948275862068967,
						"acc_norm_stderr,none": 0.03235400970172882,
						"acc_stderr,none": 0.03235400970172882,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.336283185840708,
						"acc_norm,none": 0.336283185840708,
						"acc_norm_stderr,none": 0.03149580605318969,
						"acc_stderr,none": 0.03149580605318969,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.03663974994391242,
						"acc_stderr,none": 0.03663974994391242,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.33136094674556216,
						"acc_norm,none": 0.33136094674556216,
						"acc_norm_stderr,none": 0.03631548844087171,
						"acc_stderr,none": 0.03631548844087171,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.036968263701746544,
						"acc_stderr,none": 0.036968263701746544,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5370116722248689,
						"acc_stderr,none": 0.010460701257337201,
						"alias": "glue",
						"f1,none": 0.657862004311887,
						"f1_stderr,none": 0.00027317800929356907,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5240987851025692,
						"acc_norm,none": 0.7031467835092611,
						"acc_norm_stderr,none": 0.00455937583580597,
						"acc_stderr,none": 0.0049839823961873655,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7160877158936542,
						"acc_stderr,none": 0.017102959875278094,
						"alias": "lambada",
						"perplexity,none": 3.776252908164677,
						"perplexity_stderr,none": 0.2340034067979183
					},
					"lambada_multilingual": {
						"acc,none": 0.5418591111973607,
						"acc_stderr,none": 0.08444102747672037,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.866530688155684,
						"perplexity_stderr,none": 8.177187436835187
					},
					"lambada_openai": {
						"acc,none": 0.7461672811954202,
						"acc_stderr,none": 0.0060632290441590565,
						"alias": " - lambada_openai",
						"perplexity,none": 3.336313981376705,
						"perplexity_stderr,none": 0.0656959414194124
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4294585678245682,
						"acc_stderr,none": 0.006896302489966823,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.01188094784429,
						"perplexity_stderr,none": 1.8852215908177803
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7461672811954202,
						"acc_stderr,none": 0.006063229044159063,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.33641865436334,
						"perplexity_stderr,none": 0.06564159035897987
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4579856394333398,
						"acc_stderr,none": 0.006941341313928114,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.37819564559139,
						"perplexity_stderr,none": 1.4540827309011843
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5544343101106152,
						"acc_stderr,none": 0.006924572910496156,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.268265011565354,
						"perplexity_stderr,none": 0.7898835575115243
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5212497574228605,
						"acc_stderr,none": 0.006959683808965934,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.33789318141406,
						"perplexity_stderr,none": 1.127577386859656
					},
					"lambada_standard": {
						"acc,none": 0.6842615951872696,
						"acc_stderr,none": 0.006475702085174231,
						"alias": " - lambada_standard",
						"perplexity,none": 4.216818519844873,
						"perplexity_stderr,none": 0.09027156595603063
					},
					"logiqa": {
						"acc,none": 0.2534562211981567,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.017420694783393142,
						"acc_stderr,none": 0.017061705439785732,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3171912832929782,
						"acc_stderr,none": 0.05887720124163766,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04072314811876837,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.21710526315789475,
						"acc_stderr,none": 0.03355045304882923,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39245283018867927,
						"acc_stderr,none": 0.03005258057955784,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.0358390175473641,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171453,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3617021276595745,
						"acc_stderr,none": 0.0314108219759624,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.038351539543994194,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.037528339580033376,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23015873015873015,
						"acc_stderr,none": 0.02167921966369314,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04040610178208841,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3741935483870968,
						"acc_stderr,none": 0.027528904299845787,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2512315270935961,
						"acc_stderr,none": 0.030516530732694433,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3878787878787879,
						"acc_stderr,none": 0.03804913653971011,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3484848484848485,
						"acc_stderr,none": 0.033948539651564025,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.41968911917098445,
						"acc_stderr,none": 0.035615873276858834,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.0234009289183105,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.027634907264178544,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.028510251512341933,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3541284403669725,
						"acc_stderr,none": 0.020504729013829118,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.14351851851851852,
						"acc_stderr,none": 0.02391077925264438,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4264705882352941,
						"acc_stderr,none": 0.034711579079534254,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4177215189873418,
						"acc_stderr,none": 0.032103530322412685,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.37668161434977576,
						"acc_stderr,none": 0.03252113489929188,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.042607351576445594,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.30882040382571735,
						"acc_stderr,none": 0.0487832658681239,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.30578512396694213,
						"acc_stderr,none": 0.04205953933884125,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.32515337423312884,
						"acc_stderr,none": 0.03680350371286461,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.0432704093257873,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.04865777570410769,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3717948717948718,
						"acc_stderr,none": 0.031660988918880785,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.41762452107279696,
						"acc_stderr,none": 0.017635637326951517,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3179190751445087,
						"acc_stderr,none": 0.025070713719153172,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.026090162504279053,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749598,
						"acc_stderr,none": 0.04906296067271302,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.38263665594855306,
						"acc_stderr,none": 0.027604689028582,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.31790123456790126,
						"acc_stderr,none": 0.02591006352824088,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.026244920349843007,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.28292046936114734,
						"acc_stderr,none": 0.011503891323188974,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3713235294117647,
						"acc_stderr,none": 0.02934980313976587,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.31209150326797386,
						"acc_stderr,none": 0.01874501120127766,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24897959183673468,
						"acc_stderr,none": 0.027682979522960238,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3324666883327917,
						"acc_stderr,none": 0.057410283714716,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.43283582089552236,
						"acc_stderr,none": 0.03503490923673282,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2711703139866793,
						"acc_stderr,none": 0.06216713300308039,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939098,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.03629335329947861,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3742690058479532,
						"acc_stderr,none": 0.03711601185389481,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3613856342333164,
						"acc_stderr,none": 0.004849330203645995,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35526851098454026,
						"acc_stderr,none": 0.00482690721591138,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": " - mrpc",
						"f1,none": 0.8340943683409436,
						"f1_stderr,none": 0.015673169108281978
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.03822714681440443,
						"exact_match_stderr,remove_whitespace": 0.003191749202331416
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982787,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.010992197878818595,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.010867246593514932,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3815,
						"acc_stderr,none": 0.01086452456147863,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.547,
						"acc_stderr,none": 0.01113361930098987,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5285,
						"acc_stderr,none": 0.011164954236428794,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.011178751372184862,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.011182934722804566,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4622857142857143,
						"acc_stderr,none": 0.04634266452288703,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7742110990206746,
						"acc_norm_stderr,none": 0.009754980670917325,
						"acc_stderr,none": 0.009812682950815192,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7416469093808626,
						"acc_norm,none": 0.6369494254100625,
						"acc_norm_stderr,none": 0.011234607194570884,
						"acc_stderr,none": 0.14475367447134882,
						"alias": "pythia",
						"bits_per_byte,none": 0.636316373054604,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543553595877877,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.336313981376705,
						"perplexity_stderr,none": 0.0656959414194124,
						"word_perplexity,none": 10.575620414187059,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4953322350356947,
						"acc_stderr,none": 0.006765115735419824,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6193173386099431,
						"acc_stderr,none": 0.0024148583892275566,
						"alias": " - qqp",
						"f1,none": 0.656335826727699,
						"f1_stderr,none": 0.00260063718079119
					},
					"record": {
						"alias": "record",
						"em,none": 0.2683,
						"em_stderr,none": 0.004430967653621015,
						"f1,none": 0.2781719050049782,
						"f1_stderr,none": 0.004442466298580053
					},
					"rte": {
						"acc,none": 0.6209386281588448,
						"acc_stderr,none": 0.029202804623788027,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.952,
						"acc_norm,none": 0.936,
						"acc_norm_stderr,none": 0.007743640226919292,
						"acc_stderr,none": 0.00676326413366667,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8990825688073395,
						"acc_stderr,none": 0.010206428675489104,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3254234467577032,
						"acc_stderr,none": 0.0015130352812913553,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.33047735618115054,
						"bleu_acc_stderr,none": 0.01646676961369831,
						"bleu_diff,none": -5.7036490429140345,
						"bleu_diff_stderr,none": 0.9010152814723279,
						"bleu_max,none": 28.35416588379638,
						"bleu_max_stderr,none": 0.827112611733745,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -8.00630973621455,
						"rouge1_diff_stderr,none": 1.0091288952787751,
						"rouge1_max,none": 53.506433393008265,
						"rouge1_max_stderr,none": 0.8814295056590551,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.01568092936402465,
						"rouge2_diff,none": -9.824237550112944,
						"rouge2_diff_stderr,none": 1.2058782446955432,
						"rouge2_max,none": 37.69812184422661,
						"rouge2_max_stderr,none": 1.0545210926191257,
						"rougeL_acc,none": 0.2962056303549572,
						"rougeL_acc_stderr,none": 0.015983595101811392,
						"rougeL_diff,none": -8.222814174837577,
						"rougeL_diff_stderr,none": 1.0203237631889481,
						"rougeL_max,none": 50.771430702549445,
						"rougeL_max_stderr,none": 0.9037994804473503
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.33047735618115054,
						"bleu_acc_stderr,none": 0.01646676961369831,
						"bleu_diff,none": -5.7036490429140345,
						"bleu_diff_stderr,none": 0.9010152814723279,
						"bleu_max,none": 28.35416588379638,
						"bleu_max_stderr,none": 0.827112611733745,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.015866346401384304,
						"rouge1_diff,none": -8.00630973621455,
						"rouge1_diff_stderr,none": 1.0091288952787751,
						"rouge1_max,none": 53.506433393008265,
						"rouge1_max_stderr,none": 0.8814295056590551,
						"rouge2_acc,none": 0.2778457772337821,
						"rouge2_acc_stderr,none": 0.01568092936402465,
						"rouge2_diff,none": -9.824237550112944,
						"rouge2_diff_stderr,none": 1.2058782446955432,
						"rouge2_max,none": 37.69812184422661,
						"rouge2_max_stderr,none": 1.0545210926191257,
						"rougeL_acc,none": 0.2962056303549572,
						"rougeL_acc_stderr,none": 0.015983595101811392,
						"rougeL_diff,none": -8.222814174837577,
						"rougeL_diff_stderr,none": 1.0203237631889481,
						"rougeL_max,none": 50.771430702549445,
						"rougeL_max_stderr,none": 0.9037994804473503
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2533659730722154,
						"acc_stderr,none": 0.015225899340826845,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39748092044319105,
						"acc_stderr,none": 0.014027667695025758,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.636316373054604,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5543553595877877,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.575620414187059,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6771902131018153,
						"acc_stderr,none": 0.013140498173357952,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5576923076923077,
						"acc_stderr,none": 0.04893740777700998,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.07112416203762281,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566072,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.019586711785215837,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.02166071034720448,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628046,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43651941097724234,
						"acc_stderr,none": 0.04855067774788887,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.009493454925438249,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4606425702811245,
						"acc_stderr,none": 0.009990976095711899,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4795180722891566,
						"acc_stderr,none": 0.010013660629930816,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.009765326832218988,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5385542168674698,
						"acc_stderr,none": 0.00999223427599309,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4959839357429719,
						"acc_stderr,none": 0.010021749574555898,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955252,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.44016064257028115,
						"acc_stderr,none": 0.00995004096008806,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4755020080321285,
						"acc_stderr,none": 0.010010036112667868,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38313253012048193,
						"acc_stderr,none": 0.00974446499428751,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4072289156626506,
						"acc_stderr,none": 0.009848052628967667,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811678,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.42369477911646586,
						"acc_stderr,none": 0.009904678540828908,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.009881215932115989,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3570281124497992,
						"acc_stderr,none": 0.009603615216109777,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6323325913001624,
						"acc_stderr,none": 0.05279994042993179,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.012605764077627148,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959677,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71343481138319,
						"acc_stderr,none": 0.01163591099550226,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332795,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6082064857710126,
						"acc_stderr,none": 0.012562199063960658,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.01216297499613638,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.012806088966122408,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6810059563203177,
						"acc_stderr,none": 0.01199439283393196,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716323,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907703,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6465916611515553,
						"acc_stderr,none": 0.012301695486460663,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03634271832792071,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.006983463551504547,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7590361445783133,
						"acc_stderr,none": 0.04722807605987255,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7424400417101147,
						"acc_stderr,none": 0.014128209029143985,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7680608365019012,
						"acc_stderr,none": 0.02607559386030469,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6761904761904762,
						"acc_stderr,none": 0.026406722996729998,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7777777777777778,
						"acc_stderr,none": 0.018536917448559433,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-4k-test2-fix3-blink-final_pth"
	},
	"./rwkv-x-dev/r3-c1-8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.625140924464487,
						"acc_norm,none": 0.6135851183765502,
						"acc_norm_stderr,none": 0.09105549297577427,
						"acc_stderr,none": 0.108794773908048,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.353125,
						"acc_stderr,none": 0.017607920954123897,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8360746268656717,
						"acc_stderr,none": 0.15239545957101686,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29778967363149716,
						"acc_norm,none": 0.29778967363149716,
						"acc_norm_stderr,none": 0.05505750916963189,
						"acc_stderr,none": 0.05505750916963189,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5384111481657933,
						"acc_stderr,none": 0.0101484578452747,
						"alias": "glue",
						"f1,none": 0.6574765528239048,
						"f1_stderr,none": 0.0002728662940626163,
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.029410776500703156
					},
					"lambada": {
						"acc,none": 0.7154084999029692,
						"acc_stderr,none": 0.01669944790417121,
						"alias": "lambada",
						"perplexity,none": 3.81425695648634,
						"perplexity_stderr,none": 0.23216706931219566
					},
					"lambada_multilingual": {
						"acc,none": 0.5406171162429653,
						"acc_stderr,none": 0.08583518673379903,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.61205345434061,
						"perplexity_stderr,none": 8.123644477245614
					},
					"mmlu": {
						"acc,none": 0.33485258510183735,
						"acc_stderr,none": 0.06479953249093713,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3232731137088204,
						"acc_stderr,none": 0.05987948359899165,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.38525909237206307,
						"acc_stderr,none": 0.049147967004506105,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.354891127721807,
						"acc_stderr,none": 0.06031337028984213,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.06506629255574989,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4665714285714286,
						"acc_stderr,none": 0.05360782093398782,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7421649126991223,
						"acc_norm,none": 0.617921984070359,
						"acc_norm_stderr,none": 0.010241040526241307,
						"acc_stderr,none": 0.14542911735610292,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337753600979277,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516200892795335,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3788700395663867,
						"perplexity_stderr,none": 0.06593735455525863,
						"word_perplexity,none": 10.476482130561946,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3256904033793372,
						"acc_stderr,none": 0.0017058484527051794,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.34394124847001223,
						"bleu_acc_stderr,none": 0.016629087514276806,
						"bleu_diff,none": -6.212602477683713,
						"bleu_diff_stderr,none": 0.9046111199929038,
						"bleu_max,none": 28.024366969502058,
						"bleu_max_stderr,none": 0.8205890920330008,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.01615020132132304,
						"rouge1_diff,none": -7.827088374730196,
						"rouge1_diff_stderr,none": 1.0004341333672038,
						"rouge1_max,none": 52.87046191174578,
						"rouge1_max_stderr,none": 0.8900386830347308,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -9.56772502002179,
						"rouge2_diff_stderr,none": 1.18121598145371,
						"rouge2_max,none": 36.940379649756714,
						"rouge2_max_stderr,none": 1.0550728695505844,
						"rougeL_acc,none": 0.3011015911872705,
						"rougeL_acc_stderr,none": 0.016058999026100588,
						"rougeL_diff,none": -8.147797031614575,
						"rougeL_diff_stderr,none": 1.0090177970323322,
						"rougeL_max,none": 50.05199196229567,
						"rougeL_max_stderr,none": 0.9112620752225341
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07329456526572707,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4372155287817938,
						"acc_stderr,none": 0.04802857391524508,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6313699536730641,
						"acc_stderr,none": 0.06166699757075865,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8057990559676331,
						"acc_stderr,none": 0.035792425222250554,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.625140924464487,
						"acc_norm,none": 0.6135851183765502,
						"acc_norm_stderr,none": 0.09105549297577427,
						"acc_stderr,none": 0.108794773908048,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.353125,
						"acc_stderr,none": 0.017607920954123897,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.374,
						"acc_stderr,none": 0.01530876736900636,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.014987482264363935,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3466666666666667,
						"acc_stderr,none": 0.013744022550571952,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39505119453924914,
						"acc_norm,none": 0.42150170648464164,
						"acc_norm_stderr,none": 0.014430197069326021,
						"acc_stderr,none": 0.014285898292938167,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7386363636363636,
						"acc_norm,none": 0.7083333333333334,
						"acc_norm_stderr,none": 0.009326752065621158,
						"acc_stderr,none": 0.00901583836660819,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8360746268656717,
						"acc_stderr,none": 0.15239545957101686,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437876,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578156,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.01210216767618358,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.0091888756349967,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262012,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.01549968516584259,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877366,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437555,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565678,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817157,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.0059721576223896325,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.0087285272060748,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315153,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178364,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074789,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177446,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555953,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364466,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665542,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403626,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.015537226438634593,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756991,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370145,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706618,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731973,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295447,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118585,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280307,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745904,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264915,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343965,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968531,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574885,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706818,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992434,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024968,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524289,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.01274937435902438,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.00662781471738072,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469428,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644588,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.015755101498347086,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081354,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256558,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302706,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357791,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697593,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.641,
						"acc_stderr,none": 0.0151772642247986,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108658,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117717,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409303,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323492,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704152,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.0044294039801783475,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.0062736240211188,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.01527525231651936,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.327,
						"acc_stderr,none": 0.014842213153411252,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29778967363149716,
						"acc_norm,none": 0.29778967363149716,
						"acc_norm_stderr,none": 0.05505750916963189,
						"acc_stderr,none": 0.05505750916963189,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676975,
						"acc_stderr,none": 0.03410167836676975,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.038851434494290536,
						"acc_stderr,none": 0.038851434494290536,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.03315925698294869,
						"acc_stderr,none": 0.03315925698294869,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.31297709923664124,
						"acc_norm,none": 0.31297709923664124,
						"acc_norm_stderr,none": 0.04066962905677698,
						"acc_stderr,none": 0.04066962905677698,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3014705882352941,
						"acc_norm,none": 0.3014705882352941,
						"acc_norm_stderr,none": 0.039495529298273935,
						"acc_stderr,none": 0.039495529298273935,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.33126934984520123,
						"acc_norm,none": 0.33126934984520123,
						"acc_norm_stderr,none": 0.02622939698399315,
						"acc_stderr,none": 0.02622939698399315,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.31862745098039214,
						"acc_norm,none": 0.31862745098039214,
						"acc_norm_stderr,none": 0.03270287181482081,
						"acc_stderr,none": 0.03270287181482081,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.03421843754304871,
						"acc_stderr,none": 0.03421843754304871,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2742616033755274,
						"acc_norm,none": 0.2742616033755274,
						"acc_norm_stderr,none": 0.02904133351059804,
						"acc_stderr,none": 0.02904133351059804,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.047304390228528934,
						"acc_stderr,none": 0.047304390228528934,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252627,
						"acc_stderr,none": 0.04077494709252627,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.02622311550050611,
						"acc_stderr,none": 0.02622311550050611,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03308611113236435,
						"acc_stderr,none": 0.03308611113236435,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.30994152046783624,
						"acc_norm,none": 0.30994152046783624,
						"acc_norm_stderr,none": 0.035469769593931624,
						"acc_stderr,none": 0.035469769593931624,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2805755395683453,
						"acc_norm,none": 0.2805755395683453,
						"acc_norm_stderr,none": 0.03824529014900686,
						"acc_stderr,none": 0.03824529014900686,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.033366051897610646,
						"acc_stderr,none": 0.033366051897610646,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713547,
						"acc_stderr,none": 0.03191178226713547,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3221476510067114,
						"acc_norm,none": 0.3221476510067114,
						"acc_norm_stderr,none": 0.038411757592369186,
						"acc_stderr,none": 0.038411757592369186,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662572,
						"acc_stderr,none": 0.04289122333662572,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.03831305140884603,
						"acc_stderr,none": 0.03831305140884603,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3412698412698413,
						"acc_norm,none": 0.3412698412698413,
						"acc_norm_stderr,none": 0.04240799327574923,
						"acc_stderr,none": 0.04240799327574923,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.033139568735498726,
						"acc_stderr,none": 0.033139568735498726,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3313953488372093,
						"acc_norm,none": 0.3313953488372093,
						"acc_norm_stderr,none": 0.03599646438179591,
						"acc_stderr,none": 0.03599646438179591,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.02198927219610504,
						"acc_stderr,none": 0.02198927219610504,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.38317757009345793,
						"acc_norm,none": 0.38317757009345793,
						"acc_norm_stderr,none": 0.03331120297324245,
						"acc_stderr,none": 0.03331120297324245,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.03972012975450536,
						"acc_stderr,none": 0.03972012975450536,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.04209969267310141,
						"acc_stderr,none": 0.04209969267310141,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630123,
						"acc_stderr,none": 0.03260773253630123,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03523442817211266,
						"acc_stderr,none": 0.03523442817211266,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.034380708208626445,
						"acc_stderr,none": 0.034380708208626445,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3619047619047619,
						"acc_norm,none": 0.3619047619047619,
						"acc_norm_stderr,none": 0.04712194748483612,
						"acc_stderr,none": 0.04712194748483612,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.03046967065084667,
						"acc_stderr,none": 0.03046967065084667,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3706896551724138,
						"acc_norm,none": 0.3706896551724138,
						"acc_norm_stderr,none": 0.03177837449226177,
						"acc_stderr,none": 0.03177837449226177,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.036539236154659684,
						"acc_stderr,none": 0.036539236154659684,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3254437869822485,
						"acc_norm,none": 0.3254437869822485,
						"acc_norm_stderr,none": 0.03614867847292203,
						"acc_stderr,none": 0.03614867847292203,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2981366459627329,
						"acc_norm,none": 0.2981366459627329,
						"acc_norm_stderr,none": 0.03616379286462019,
						"acc_stderr,none": 0.03616379286462019,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.029410776500703156
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5384111481657933,
						"acc_stderr,none": 0.0101484578452747,
						"alias": "glue",
						"f1,none": 0.6574765528239048,
						"f1_stderr,none": 0.0002728662940626163,
						"mcc,none": 0.0463559874942472,
						"mcc_stderr,none": 0.029410776500703156
					},
					"hellaswag": {
						"acc,none": 0.5254929296952798,
						"acc_norm,none": 0.7090221071499702,
						"acc_norm_stderr,none": 0.004532850566893531,
						"acc_stderr,none": 0.004983291578289046,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7154084999029692,
						"acc_stderr,none": 0.01669944790417121,
						"alias": "lambada",
						"perplexity,none": 3.81425695648634,
						"perplexity_stderr,none": 0.23216706931219566
					},
					"lambada_multilingual": {
						"acc,none": 0.5406171162429653,
						"acc_stderr,none": 0.08583518673379903,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.61205345434061,
						"perplexity_stderr,none": 8.123644477245614
					},
					"lambada_openai": {
						"acc,none": 0.7459732194837958,
						"acc_stderr,none": 0.006064757540495048,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3788700395663867,
						"perplexity_stderr,none": 0.06593735455525863
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4242189016107122,
						"acc_stderr,none": 0.006885504751619322,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.212084212072504,
						"perplexity_stderr,none": 1.8764773980155438
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7463613429070445,
						"acc_stderr,none": 0.006061698956508256,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3780051694888567,
						"perplexity_stderr,none": 0.06598916983806231
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4527459732194838,
						"acc_stderr,none": 0.006934798617263741,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.282134172460168,
						"perplexity_stderr,none": 1.385765510276373
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5552105569571124,
						"acc_stderr,none": 0.006923379948184629,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.004381827211912,
						"perplexity_stderr,none": 0.7718351308827663
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5245488065204735,
						"acc_stderr,none": 0.006957576583374083,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.183661890469622,
						"perplexity_stderr,none": 1.1169080128872735
					},
					"lambada_standard": {
						"acc,none": 0.6844556568988939,
						"acc_stderr,none": 0.006474629636371577,
						"alias": " - lambada_standard",
						"perplexity,none": 4.25052604822976,
						"perplexity_stderr,none": 0.09096229477790393
					},
					"logiqa": {
						"acc,none": 0.2626728110599078,
						"acc_norm,none": 0.2887864823348694,
						"acc_norm_stderr,none": 0.01777590633653924,
						"acc_stderr,none": 0.017261598347857544,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.33485258510183735,
						"acc_stderr,none": 0.06479953249093713,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952924,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4037735849056604,
						"acc_stderr,none": 0.03019761160019795,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3680555555555556,
						"acc_stderr,none": 0.040329990539607195,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.044405219061793275,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.34893617021276596,
						"acc_stderr,none": 0.031158522131357783,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748141,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2620689655172414,
						"acc_stderr,none": 0.036646663372252565,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2328042328042328,
						"acc_stderr,none": 0.021765961672154534,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3903225806451613,
						"acc_stderr,none": 0.027751256636969576,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03144712581678242,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03825460278380026,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.43434343434343436,
						"acc_stderr,none": 0.035315058793591834,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.43005181347150256,
						"acc_stderr,none": 0.03572954333144809,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.33589743589743587,
						"acc_stderr,none": 0.023946724741563973,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26296296296296295,
						"acc_stderr,none": 0.02684205787383371,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.029079374539480007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.4018348623853211,
						"acc_stderr,none": 0.021020106172997016,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16203703703703703,
						"acc_stderr,none": 0.02513045365226846,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46568627450980393,
						"acc_stderr,none": 0.03501038327635897,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4092827004219409,
						"acc_stderr,none": 0.03200704183359592,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.40458015267175573,
						"acc_stderr,none": 0.043046937953806645,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3232731137088204,
						"acc_stderr,none": 0.05987948359899165,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3140495867768595,
						"acc_stderr,none": 0.042369647530410184,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.31901840490797545,
						"acc_stderr,none": 0.03661997551073836,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.24107142857142858,
						"acc_stderr,none": 0.04059867246952686,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4563106796116505,
						"acc_stderr,none": 0.049318019942204146,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.031937057262002924,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.44061302681992337,
						"acc_stderr,none": 0.01775339697390848,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.36416184971098264,
						"acc_stderr,none": 0.025906632631016117,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.027184498909941613,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.38525909237206307,
						"acc_stderr,none": 0.049147967004506105,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.40192926045016075,
						"acc_stderr,none": 0.02784647600593048,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.39197530864197533,
						"acc_stderr,none": 0.027163686038271215,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290392,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2848761408083442,
						"acc_stderr,none": 0.011527830846369016,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.43014705882352944,
						"acc_stderr,none": 0.030074971917302875,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3104575163398693,
						"acc_stderr,none": 0.01871806705262322,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.41818181818181815,
						"acc_stderr,none": 0.04724577405731572,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22040816326530613,
						"acc_stderr,none": 0.02653704531214531,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.354891127721807,
						"acc_stderr,none": 0.06031337028984213,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.46766169154228854,
						"acc_stderr,none": 0.03528131472933607,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.06506629255574989,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120575,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.0381107966983353,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3643402954661233,
						"acc_stderr,none": 0.004857836762131825,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36065907241659884,
						"acc_stderr,none": 0.004843015243984154,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.0219326685441502,
						"alias": " - mrpc",
						"f1,none": 0.833587786259542,
						"f1_stderr,none": 0.01574158711414479
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.006371191135734072,
						"exact_match_stderr,remove_whitespace": 0.0013244298594293294
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.020629569998345393,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.414,
						"acc_stderr,none": 0.011016473180681309,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.010898962964284812,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3825,
						"acc_stderr,none": 0.01086995643857379,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.011164310140373716,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4965,
						"acc_stderr,none": 0.011182862030875934,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.011181704488030008,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4665714285714286,
						"acc_stderr,none": 0.05360782093398782,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7687704026115343,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.00977985076784725,
						"acc_stderr,none": 0.009837063180625326,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7421649126991223,
						"acc_norm,none": 0.617921984070359,
						"acc_norm_stderr,none": 0.010241040526241307,
						"acc_stderr,none": 0.14542911735610292,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337753600979277,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516200892795335,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3788700395663867,
						"perplexity_stderr,none": 0.06593735455525863,
						"word_perplexity,none": 10.476482130561946,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49789492952590153,
						"acc_stderr,none": 0.006765350592089551,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6187979223348998,
						"acc_stderr,none": 0.002415491720851279,
						"alias": " - qqp",
						"f1,none": 0.6559514242343066,
						"f1_stderr,none": 0.002607061928393721
					},
					"record": {
						"alias": "record",
						"em,none": 0.2706,
						"em_stderr,none": 0.004442919980575463,
						"f1,none": 0.27972523832023144,
						"f1_stderr,none": 0.004452314004344551
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217447,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.948,
						"acc_norm,none": 0.921,
						"acc_norm_stderr,none": 0.008534156773333456,
						"acc_stderr,none": 0.0070246242138171456,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9174311926605505,
						"acc_stderr,none": 0.009325791021628803,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3256904033793372,
						"acc_stderr,none": 0.0017058484527051794,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.34394124847001223,
						"bleu_acc_stderr,none": 0.016629087514276806,
						"bleu_diff,none": -6.212602477683713,
						"bleu_diff_stderr,none": 0.9046111199929038,
						"bleu_max,none": 28.024366969502058,
						"bleu_max_stderr,none": 0.8205890920330008,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.01615020132132304,
						"rouge1_diff,none": -7.827088374730196,
						"rouge1_diff_stderr,none": 1.0004341333672038,
						"rouge1_max,none": 52.87046191174578,
						"rouge1_max_stderr,none": 0.8900386830347308,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -9.56772502002179,
						"rouge2_diff_stderr,none": 1.18121598145371,
						"rouge2_max,none": 36.940379649756714,
						"rouge2_max_stderr,none": 1.0550728695505844,
						"rougeL_acc,none": 0.3011015911872705,
						"rougeL_acc_stderr,none": 0.016058999026100588,
						"rougeL_diff,none": -8.147797031614575,
						"rougeL_diff_stderr,none": 1.0090177970323322,
						"rougeL_max,none": 50.05199196229567,
						"rougeL_max_stderr,none": 0.9112620752225341
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.34394124847001223,
						"bleu_acc_stderr,none": 0.016629087514276806,
						"bleu_diff,none": -6.212602477683713,
						"bleu_diff_stderr,none": 0.9046111199929038,
						"bleu_max,none": 28.024366969502058,
						"bleu_max_stderr,none": 0.8205890920330008,
						"rouge1_acc,none": 0.3072215422276622,
						"rouge1_acc_stderr,none": 0.01615020132132304,
						"rouge1_diff,none": -7.827088374730196,
						"rouge1_diff_stderr,none": 1.0004341333672038,
						"rouge1_max,none": 52.87046191174578,
						"rouge1_max_stderr,none": 0.8900386830347308,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.01563813566777552,
						"rouge2_diff,none": -9.56772502002179,
						"rouge2_diff_stderr,none": 1.18121598145371,
						"rouge2_max,none": 36.940379649756714,
						"rouge2_max_stderr,none": 1.0550728695505844,
						"rougeL_acc,none": 0.3011015911872705,
						"rougeL_acc_stderr,none": 0.016058999026100588,
						"rougeL_diff,none": -8.147797031614575,
						"rougeL_diff_stderr,none": 1.0090177970323322,
						"rougeL_max,none": 50.05199196229567,
						"rougeL_max_stderr,none": 0.9112620752225341
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2484700122399021,
						"acc_stderr,none": 0.015127427096520681,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4029107945187724,
						"acc_stderr,none": 0.01413506842499623,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6337753600979277,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516200892795335,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.476482130561946,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6685082872928176,
						"acc_stderr,none": 0.013230397198964652,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5096153846153846,
						"acc_stderr,none": 0.0492573531427353,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6221818181818182,
						"acc_stderr,none": 0.07329456526572707,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.0219308441207285,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.019874354831287497,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.02148775108972052,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988964,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4372155287817938,
						"acc_stderr,none": 0.04802857391524508,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4650602409638554,
						"acc_stderr,none": 0.009997573294114558,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4891566265060241,
						"acc_stderr,none": 0.010019715824483485,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3891566265060241,
						"acc_stderr,none": 0.009772702993836013,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5377510040160642,
						"acc_stderr,none": 0.009993466360872788,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4947791164658635,
						"acc_stderr,none": 0.010021526496530339,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4951807228915663,
						"acc_stderr,none": 0.010021607322475472,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43132530120481927,
						"acc_stderr,none": 0.009927090290379251,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4831325301204819,
						"acc_stderr,none": 0.010016368453021547,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3931726907630522,
						"acc_stderr,none": 0.009790655797269846,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453012,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45943775100401607,
						"acc_stderr,none": 0.009989039874786896,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41004016064257026,
						"acc_stderr,none": 0.009858525713807855,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115701,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.351004016064257,
						"acc_stderr,none": 0.009566753834803288,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6313699536730641,
						"acc_stderr,none": 0.06166699757075865,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570946,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7816015883520847,
						"acc_stderr,none": 0.010632343054700491,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.011627856346940616,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.01277240869797914,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247465,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6585043017868961,
						"acc_stderr,none": 0.01220347324121445,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.012829804720321691,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6810059563203177,
						"acc_stderr,none": 0.011994392833931961,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927905,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6558570483123759,
						"acc_stderr,none": 0.012226032926509716,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8057990559676331,
						"acc_stderr,none": 0.035792425222250554,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8619354838709677,
						"acc_stderr,none": 0.007155835621381236,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7424400417101147,
						"acc_stderr,none": 0.014128209029143982,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944937,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.026473487980890983,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7797619047619048,
						"acc_stderr,none": 0.018477501049056294,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-c1-8_pth"
	},
	"./rwkv-x-dev/r3-testchunk-1-8-no-cuda-with-warmup_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6231679819616686,
						"acc_norm,none": 0.6082299887260428,
						"acc_norm_stderr,none": 0.09054992613398766,
						"acc_stderr,none": 0.10867090891861504,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.354375,
						"acc_stderr,none": 0.0163580375105246,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8335820895522388,
						"acc_stderr,none": 0.15133040703803005,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29390433431186336,
						"acc_norm,none": 0.29390433431186336,
						"acc_norm_stderr,none": 0.05538242008083191,
						"acc_stderr,none": 0.05538242008083191,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.534674249642687,
						"acc_stderr,none": 0.012442106604893175,
						"alias": "glue",
						"f1,none": 0.6602791222709579,
						"f1_stderr,none": 0.0002690711093614733,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.017205836842963076,
						"alias": "lambada",
						"perplexity,none": 3.847726732141799,
						"perplexity_stderr,none": 0.25265539554736594
					},
					"lambada_multilingual": {
						"acc,none": 0.5361925092179313,
						"acc_stderr,none": 0.08589946387772861,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.991805740231875,
						"perplexity_stderr,none": 8.220666161596865
					},
					"mmlu": {
						"acc,none": 0.3146275459336277,
						"acc_stderr,none": 0.05880633606239949,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.30393198724760895,
						"acc_stderr,none": 0.0534364834943281,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3646604441583521,
						"acc_stderr,none": 0.047703931866617054,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3262918427039324,
						"acc_stderr,none": 0.05747959044146185,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26990168093878847,
						"acc_stderr,none": 0.05521336680026564,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48328571428571426,
						"acc_stderr,none": 0.0560561678986785,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7373974757436703,
						"acc_norm,none": 0.6129431620361558,
						"acc_norm_stderr,none": 0.010220488656312953,
						"acc_stderr,none": 0.14400682851750338,
						"alias": "pythia",
						"bits_per_byte,none": 0.6338144870022319,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551662170878407,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3691144730932994,
						"perplexity_stderr,none": 0.06625623684847463,
						"word_perplexity,none": 10.478001609563373,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.31867468741315647,
						"acc_stderr,none": 0.0014864150049306065,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -7.460490705539453,
						"bleu_diff_stderr,none": 0.8950347628752554,
						"bleu_max,none": 27.361223632558687,
						"bleu_max_stderr,none": 0.8210922632451596,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -9.990579379907144,
						"rouge1_diff_stderr,none": 0.9593125622108607,
						"rouge1_max,none": 51.90999910585592,
						"rouge1_max_stderr,none": 0.8927526992719707,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826852,
						"rouge2_diff,none": -12.085598250810639,
						"rouge2_diff_stderr,none": 1.16518182916759,
						"rouge2_max,none": 35.72866045407304,
						"rouge2_max_stderr,none": 1.0531571748740225,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.37795626225276,
						"rougeL_diff_stderr,none": 0.9750861508263083,
						"rougeL_max,none": 49.20513838930842,
						"rougeL_max_stderr,none": 0.915117504198659
					},
					"xcopa": {
						"acc,none": 0.6223636363636362,
						"acc_stderr,none": 0.07095721581010465,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.04963249690155001,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6310089645629023,
						"acc_stderr,none": 0.061107532728020476,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.814340301191279,
						"acc_stderr,none": 0.035387810655407276,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6231679819616686,
						"acc_norm,none": 0.6082299887260428,
						"acc_norm_stderr,none": 0.09054992613398766,
						"acc_stderr,none": 0.10867090891861504,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.354375,
						"acc_stderr,none": 0.0163580375105246,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.015275252316519362,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.015080663991563097,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311012,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39334470989761094,
						"acc_norm,none": 0.41723549488054607,
						"acc_norm_stderr,none": 0.014409825518403077,
						"acc_stderr,none": 0.014275101465693024,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7365319865319865,
						"acc_norm,none": 0.7024410774410774,
						"acc_norm_stderr,none": 0.009381226721815534,
						"acc_stderr,none": 0.009039157374497717,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8335820895522388,
						"acc_stderr,none": 0.15133040703803005,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523724,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578221,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454264,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766274,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262033,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015093,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.01277255409611311,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.01067487484483796,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998106,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163039,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118758,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866446,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286418,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611442,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662777,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490528,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937593,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750641,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557414,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890143,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319424,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.014976758771620342,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653893,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366664,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.641,
						"acc_stderr,none": 0.015177264224798592,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175313,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315176,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491125,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474934,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942302,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031307,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847164,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.015803979428161943,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264888,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784371,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340995,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.01306317904059529,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666692,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753655,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.01578886595953901,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452369,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866447,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767593,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.723,
						"acc_stderr,none": 0.014158794845306263,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.487,
						"acc_stderr,none": 0.015813952101896622,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108668,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244066,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381782,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653869,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175318,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475293,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832012,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323492,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029937,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.00539714082909922,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.01539519444541081,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.312,
						"acc_stderr,none": 0.014658474370509012,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29390433431186336,
						"acc_norm,none": 0.29390433431186336,
						"acc_norm_stderr,none": 0.05538242008083191,
						"acc_stderr,none": 0.05538242008083191,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566342,
						"acc_stderr,none": 0.03273897454566342,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3875,
						"acc_norm,none": 0.3875,
						"acc_norm_stderr,none": 0.03863583812241406,
						"acc_stderr,none": 0.03863583812241406,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624335,
						"acc_stderr,none": 0.03546563019624335,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3492822966507177,
						"acc_norm,none": 0.3492822966507177,
						"acc_norm_stderr,none": 0.033056200243000926,
						"acc_stderr,none": 0.033056200243000926,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.26717557251908397,
						"acc_norm,none": 0.26717557251908397,
						"acc_norm_stderr,none": 0.038808483010823944,
						"acc_stderr,none": 0.038808483010823944,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03921568627450978,
						"acc_stderr,none": 0.03921568627450978,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.32507739938080493,
						"acc_norm,none": 0.32507739938080493,
						"acc_norm_stderr,none": 0.02610312109754256,
						"acc_stderr,none": 0.02610312109754256,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.031493281045079556,
						"acc_stderr,none": 0.031493281045079556,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3128491620111732,
						"acc_norm,none": 0.3128491620111732,
						"acc_norm_stderr,none": 0.03475229875511106,
						"acc_stderr,none": 0.03475229875511106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.02931281415395592,
						"acc_stderr,none": 0.02931281415395592,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.03889722288318549,
						"acc_stderr,none": 0.03889722288318549,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.04773249298367361,
						"acc_stderr,none": 0.04773249298367361,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.21978021978021978,
						"acc_norm,none": 0.21978021978021978,
						"acc_norm_stderr,none": 0.025108358900325766,
						"acc_stderr,none": 0.025108358900325766,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3312883435582822,
						"acc_norm,none": 0.3312883435582822,
						"acc_norm_stderr,none": 0.03697983910025588,
						"acc_stderr,none": 0.03697983910025588,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.034724693044775976,
						"acc_stderr,none": 0.034724693044775976,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.028271399816988556,
						"acc_stderr,none": 0.028271399816988556,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.030313710538198885,
						"acc_stderr,none": 0.030313710538198885,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949527,
						"acc_stderr,none": 0.029322764228949527,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3356643356643357,
						"acc_norm,none": 0.3356643356643357,
						"acc_norm_stderr,none": 0.03962800523347343,
						"acc_stderr,none": 0.03962800523347343,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2751677852348993,
						"acc_norm,none": 0.2751677852348993,
						"acc_norm_stderr,none": 0.03671019403342562,
						"acc_stderr,none": 0.03671019403342562,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.043197822302613445,
						"acc_stderr,none": 0.043197822302613445,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.04013964554072774,
						"acc_stderr,none": 0.04013964554072774,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.04073524322147127,
						"acc_stderr,none": 0.04073524322147127,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593335,
						"acc_stderr,none": 0.03253020905593335,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.036152631988716356,
						"acc_stderr,none": 0.036152631988716356,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.021989272196105043,
						"acc_stderr,none": 0.021989272196105043,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3691588785046729,
						"acc_norm,none": 0.3691588785046729,
						"acc_norm_stderr,none": 0.03306563404172723,
						"acc_stderr,none": 0.03306563404172723,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3008130081300813,
						"acc_norm,none": 0.3008130081300813,
						"acc_norm_stderr,none": 0.04152073768551428,
						"acc_stderr,none": 0.04152073768551428,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3524590163934426,
						"acc_norm,none": 0.3524590163934426,
						"acc_norm_stderr,none": 0.0434305428342706,
						"acc_stderr,none": 0.0434305428342706,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.032367278954043524,
						"acc_stderr,none": 0.032367278954043524,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.03508485373860692,
						"acc_stderr,none": 0.03508485373860692,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.033949216164478796,
						"acc_stderr,none": 0.033949216164478796,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438014,
						"acc_stderr,none": 0.03780019230438014,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.34285714285714286,
						"acc_norm,none": 0.34285714285714286,
						"acc_norm_stderr,none": 0.04654465622977446,
						"acc_stderr,none": 0.04654465622977446,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.022517032434592285,
						"acc_stderr,none": 0.022517032434592285,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.36637931034482757,
						"acc_norm,none": 0.36637931034482757,
						"acc_norm_stderr,none": 0.03170108710059699,
						"acc_stderr,none": 0.03170108710059699,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493033,
						"acc_stderr,none": 0.030945344741493033,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624338,
						"acc_stderr,none": 0.03546563019624338,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.0348937065201876,
						"acc_stderr,none": 0.0348937065201876,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.534674249642687,
						"acc_stderr,none": 0.012442106604893175,
						"alias": "glue",
						"f1,none": 0.6602791222709579,
						"f1_stderr,none": 0.0002690711093614733,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5248954391555467,
						"acc_norm,none": 0.708922525393348,
						"acc_norm_stderr,none": 0.004533307758521346,
						"acc_stderr,none": 0.004983592410934172,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7108480496797982,
						"acc_stderr,none": 0.017205836842963076,
						"alias": "lambada",
						"perplexity,none": 3.847726732141799,
						"perplexity_stderr,none": 0.25265539554736594
					},
					"lambada_multilingual": {
						"acc,none": 0.5361925092179313,
						"acc_stderr,none": 0.08589946387772861,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.991805740231875,
						"perplexity_stderr,none": 8.220666161596865
					},
					"lambada_openai": {
						"acc,none": 0.7430622938094315,
						"acc_stderr,none": 0.006087494839873366,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3691144730932994,
						"perplexity_stderr,none": 0.06625623684847463
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4207257908014749,
						"acc_stderr,none": 0.006877866423280063,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.67023603937053,
						"perplexity_stderr,none": 1.9211563088461348
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.74345041723268,
						"acc_stderr,none": 0.0060844837271676845,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.368652642859995,
						"perplexity_stderr,none": 0.06621123303511606
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45099941781486513,
						"acc_stderr,none": 0.0069324455308038945,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.528455255277866,
						"perplexity_stderr,none": 1.3961683393555175
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.549776829031632,
						"acc_stderr,none": 0.006931372038835371,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.562231869833578,
						"perplexity_stderr,none": 0.8024246636181283
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5160100912090044,
						"acc_stderr,none": 0.00696240566050428,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.829452893817415,
						"perplexity_stderr,none": 1.1562735433305324
					},
					"lambada_standard": {
						"acc,none": 0.6788278672617892,
						"acc_stderr,none": 0.006505202676138958,
						"alias": " - lambada_standard",
						"perplexity,none": 4.326362485629951,
						"perplexity_stderr,none": 0.09337747417331156
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399508,
						"acc_stderr,none": 0.017162894755127063,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3146275459336277,
						"acc_stderr,none": 0.05880633606239949,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036846,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3849056603773585,
						"acc_stderr,none": 0.029946498567699948,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.039420826399272135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816508,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.03714325906302067,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.30638297872340425,
						"acc_stderr,none": 0.030135906478517563,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.037245636197746325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.22486772486772486,
						"acc_stderr,none": 0.02150209607822914,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.039325376803928704,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3548387096774194,
						"acc_stderr,none": 0.027218889773308767,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.28078817733990147,
						"acc_stderr,none": 0.0316185633535861,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3939393939393939,
						"acc_stderr,none": 0.038154943086889305,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.0347327959083696,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45077720207253885,
						"acc_stderr,none": 0.03590910952235524,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28974358974358977,
						"acc_stderr,none": 0.02300062824368797,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.02606715922227581,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.24789915966386555,
						"acc_stderr,none": 0.028047967224176892,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3394495412844037,
						"acc_stderr,none": 0.02030210934266235,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1574074074074074,
						"acc_stderr,none": 0.02483717351824239,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4362745098039216,
						"acc_stderr,none": 0.03480693138457039,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3881856540084388,
						"acc_stderr,none": 0.03172295004332328,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.40358744394618834,
						"acc_stderr,none": 0.03292802819330314,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.30393198724760895,
						"acc_stderr,none": 0.0534364834943281,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.38016528925619836,
						"acc_stderr,none": 0.04431324501968432,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3425925925925926,
						"acc_stderr,none": 0.045879047413018105,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615624,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.36893203883495146,
						"acc_stderr,none": 0.04777615181156739,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4017094017094017,
						"acc_stderr,none": 0.03211693751051622,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.42656449553001274,
						"acc_stderr,none": 0.017686066975675645,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.315028901734104,
						"acc_stderr,none": 0.0250093137900697,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3104575163398693,
						"acc_stderr,none": 0.026493033225145898,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3646604441583521,
						"acc_stderr,none": 0.047703931866617054,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3408360128617363,
						"acc_stderr,none": 0.02692084126077616,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.026041766202717163,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902002,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2737940026075619,
						"acc_stderr,none": 0.011388612167979395,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3382352941176471,
						"acc_stderr,none": 0.028739328513983576,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.32189542483660133,
						"acc_stderr,none": 0.018901015322093095,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22448979591836735,
						"acc_stderr,none": 0.026711430555538408,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3262918427039324,
						"acc_stderr,none": 0.05747959044146185,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4129353233830846,
						"acc_stderr,none": 0.03481520803367348,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26990168093878847,
						"acc_stderr,none": 0.05521336680026564,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3132530120481928,
						"acc_stderr,none": 0.03610805018031023,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.42105263157894735,
						"acc_stderr,none": 0.037867207062342145,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3379521141110545,
						"acc_stderr,none": 0.004774734840162472,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.33848657445077296,
						"acc_stderr,none": 0.004772448023078343,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7377450980392157,
						"acc_stderr,none": 0.02180307601533612,
						"alias": " - mrpc",
						"f1,none": 0.8351309707241911,
						"f1_stderr,none": 0.01570219675396612
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982787,
						"acc_stderr,none": 0.02055326917420919,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.438,
						"acc_stderr,none": 0.011096827014281873,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3765,
						"acc_stderr,none": 0.010836631916589656,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.011012544577391415,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5565,
						"acc_stderr,none": 0.011111507899646485,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5305,
						"acc_stderr,none": 0.011162310405413184,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.011174185930778312,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48328571428571426,
						"acc_stderr,none": 0.0560561678986785,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7682263329706203,
						"acc_norm,none": 0.778563656147987,
						"acc_norm_stderr,none": 0.009687616456840268,
						"acc_stderr,none": 0.009845143772794033,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7373974757436703,
						"acc_norm,none": 0.6129431620361558,
						"acc_norm_stderr,none": 0.010220488656312953,
						"acc_stderr,none": 0.14400682851750338,
						"alias": "pythia",
						"bits_per_byte,none": 0.6338144870022319,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551662170878407,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3691144730932994,
						"perplexity_stderr,none": 0.06625623684847463,
						"word_perplexity,none": 10.478001609563373,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49881017755811824,
						"acc_stderr,none": 0.006765391396471467,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.624016819193668,
						"acc_stderr,none": 0.00240899476867833,
						"alias": " - qqp",
						"f1,none": 0.6587648999932655,
						"f1_stderr,none": 0.0026027581637600573
					},
					"record": {
						"alias": "record",
						"em,none": 0.2701,
						"em_stderr,none": 0.004440334520851811,
						"f1,none": 0.2803285714285711,
						"f1_stderr,none": 0.0044513828451119486
					},
					"rte": {
						"acc,none": 0.6353790613718412,
						"acc_stderr,none": 0.028972282465132403,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.945,
						"acc_norm,none": 0.922,
						"acc_norm_stderr,none": 0.008484573530118581,
						"acc_stderr,none": 0.00721297629463923,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9231651376146789,
						"acc_stderr,none": 0.009024222176285861,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.31867468741315647,
						"acc_stderr,none": 0.0014864150049306065,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -7.460490705539453,
						"bleu_diff_stderr,none": 0.8950347628752554,
						"bleu_max,none": 27.361223632558687,
						"bleu_max_stderr,none": 0.8210922632451596,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -9.990579379907144,
						"rouge1_diff_stderr,none": 0.9593125622108607,
						"rouge1_max,none": 51.90999910585592,
						"rouge1_max_stderr,none": 0.8927526992719707,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826852,
						"rouge2_diff,none": -12.085598250810639,
						"rouge2_diff_stderr,none": 1.16518182916759,
						"rouge2_max,none": 35.72866045407304,
						"rouge2_max_stderr,none": 1.0531571748740225,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.37795626225276,
						"rougeL_diff_stderr,none": 0.9750861508263083,
						"rougeL_max,none": 49.20513838930842,
						"rougeL_max_stderr,none": 0.915117504198659
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -7.460490705539453,
						"bleu_diff_stderr,none": 0.8950347628752554,
						"bleu_max,none": 27.361223632558687,
						"bleu_max_stderr,none": 0.8210922632451596,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -9.990579379907144,
						"rouge1_diff_stderr,none": 0.9593125622108607,
						"rouge1_max,none": 51.90999910585592,
						"rouge1_max_stderr,none": 0.8927526992719707,
						"rouge2_acc,none": 0.2533659730722154,
						"rouge2_acc_stderr,none": 0.015225899340826852,
						"rouge2_diff,none": -12.085598250810639,
						"rouge2_diff_stderr,none": 1.16518182916759,
						"rouge2_max,none": 35.72866045407304,
						"rouge2_max_stderr,none": 1.0531571748740225,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777298,
						"rougeL_diff,none": -10.37795626225276,
						"rougeL_diff_stderr,none": 0.9750861508263083,
						"rougeL_max,none": 49.20513838930842,
						"rougeL_max_stderr,none": 0.915117504198659
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24724602203182375,
						"acc_stderr,none": 0.015102404797359652,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3901033527944892,
						"acc_stderr,none": 0.013871394609417833,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6338144870022319,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.551662170878407,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.478001609563373,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6795580110497238,
						"acc_stderr,none": 0.01311508545768171,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6223636363636362,
						"acc_stderr,none": 0.07095721581010465,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928862,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.020055833888070924,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.019635965529725512,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.02227087748536044,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582408,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896135,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.04963249690155001,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778584,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4682730923694779,
						"acc_stderr,none": 0.0100018761464667,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624487,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673288,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5293172690763053,
						"acc_stderr,none": 0.010004830045543991,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4951807228915663,
						"acc_stderr,none": 0.010021607322475465,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4967871485943775,
						"acc_stderr,none": 0.010021865961119552,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42730923694779116,
						"acc_stderr,none": 0.009915595034908124,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975077,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39437751004016064,
						"acc_stderr,none": 0.009795906230304215,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40642570281124496,
						"acc_stderr,none": 0.0098449990344642,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4465863453815261,
						"acc_stderr,none": 0.009964722457358776,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480243,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43012048192771085,
						"acc_stderr,none": 0.009923711675408058,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3421686746987952,
						"acc_stderr,none": 0.00950965914301563,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6310089645629023,
						"acc_stderr,none": 0.061107532728020476,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751379,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7816015883520847,
						"acc_stderr,none": 0.010632343054700491,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7154202514890801,
						"acc_stderr,none": 0.011611655347089399,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352966,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6035737921906023,
						"acc_stderr,none": 0.012588033568434749,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6637988087359364,
						"acc_stderr,none": 0.01215708308123975,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126668,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6823295830575777,
						"acc_stderr,none": 0.011981108837175403,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.01277651858633279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823756,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139018,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.814340301191279,
						"acc_stderr,none": 0.035387810655407276,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8683870967741936,
						"acc_stderr,none": 0.007012741874121964,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7549530761209593,
						"acc_stderr,none": 0.01389638547259635,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7870722433460076,
						"acc_stderr,none": 0.02529139544566284,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.026473487980890983,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7936507936507936,
						"acc_stderr,none": 0.018043971660827256,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk-1-8-no-cuda-with-warmup_pth"
	},
	"./rwkv-x-dev/r3-testchunk-1-8_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6192220969560316,
						"acc_norm,none": 0.6068207440811725,
						"acc_norm_stderr,none": 0.09029126938044345,
						"acc_stderr,none": 0.10922070604528589,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3596875,
						"acc_stderr,none": 0.017435207683664012,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8337761194029851,
						"acc_stderr,none": 0.15989393277727873,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2939043343118633,
						"acc_norm,none": 0.2939043343118633,
						"acc_norm_stderr,none": 0.05578855423918061,
						"acc_stderr,none": 0.05578855423918061,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5442472606002858,
						"acc_stderr,none": 0.01296369391004516,
						"alias": "glue",
						"f1,none": 0.6674123884194917,
						"f1_stderr,none": 0.0002603236689035392,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7089074325635553,
						"acc_stderr,none": 0.01604450406612138,
						"alias": "lambada",
						"perplexity,none": 3.8546991028590867,
						"perplexity_stderr,none": 0.2469929123955875
					},
					"lambada_multilingual": {
						"acc,none": 0.5358043857946827,
						"acc_stderr,none": 0.08162709055795155,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.032834691858042,
						"perplexity_stderr,none": 8.069924621452621
					},
					"mmlu": {
						"acc,none": 0.3159806295399516,
						"acc_stderr,none": 0.06084916761631225,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3015940488841658,
						"acc_stderr,none": 0.05053326614922218,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3678789829417444,
						"acc_stderr,none": 0.049239431873684125,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.333441663958401,
						"acc_stderr,none": 0.06443904150382021,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26926736441484306,
						"acc_stderr,none": 0.05738163666360001,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4794285714285714,
						"acc_stderr,none": 0.05610864155487587,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7375032561682089,
						"acc_norm,none": 0.6116335336948645,
						"acc_norm_stderr,none": 0.010205420180856748,
						"acc_stderr,none": 0.15083398428974418,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337843033903401,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516297078300858,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3877282159499233,
						"perplexity_stderr,none": 0.06654038603061653,
						"word_perplexity,none": 10.476829420620149,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3170372472766921,
						"acc_stderr,none": 0.0015588211783750144,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.01627228795791693,
						"bleu_diff,none": -7.567501136333758,
						"bleu_diff_stderr,none": 0.905469643847422,
						"bleu_max,none": 27.268726220046656,
						"bleu_max_stderr,none": 0.8186731988292876,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237263,
						"rouge1_diff,none": -10.454010940513674,
						"rouge1_diff_stderr,none": 0.9747078133897068,
						"rouge1_max,none": 51.82220985146748,
						"rouge1_max_stderr,none": 0.8979132328141808,
						"rouge2_acc,none": 0.24479804161566707,
						"rouge2_acc_stderr,none": 0.015051869486715014,
						"rouge2_diff,none": -12.457646353430821,
						"rouge2_diff_stderr,none": 1.1786180798188124,
						"rouge2_max,none": 35.603822044818465,
						"rouge2_max_stderr,none": 1.0564385410995856,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842883,
						"rougeL_diff,none": -10.679594014036027,
						"rougeL_diff_stderr,none": 0.9887547285344408,
						"rougeL_max,none": 49.13339680535411,
						"rougeL_max_stderr,none": 0.9179911867448477
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.07152267812287266,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43576974564926374,
						"acc_stderr,none": 0.050185786700428864,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6303471511942723,
						"acc_stderr,none": 0.05318402595248758,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8134412227466846,
						"acc_stderr,none": 0.0357797384536733,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6192220969560316,
						"acc_norm,none": 0.6068207440811725,
						"acc_norm_stderr,none": 0.09029126938044345,
						"acc_stderr,none": 0.10922070604528589,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3596875,
						"acc_stderr,none": 0.017435207683664012,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.378,
						"acc_stderr,none": 0.015341165254026647,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.015195720118175125,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3433333333333333,
						"acc_stderr,none": 0.01371263383046586,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.38822525597269625,
						"acc_norm,none": 0.41638225255972694,
						"acc_norm_stderr,none": 0.014405618279436176,
						"acc_stderr,none": 0.014241614207414044,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7331649831649831,
						"acc_norm,none": 0.7007575757575758,
						"acc_norm_stderr,none": 0.009396447162309822,
						"acc_stderr,none": 0.009075915859267265,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8337761194029851,
						"acc_stderr,none": 0.15989393277727873,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139997,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469352,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662735,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737234,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.0156103389675778,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024387,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408016,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565643,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897897,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.0063463592930338465,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118744,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151101,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832022,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496115,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662775,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717595,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967227,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803065,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937823,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689097,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.01496596071022447,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695798,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099192,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.01511040450564866,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968128,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.00785529793869759,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286429,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592083,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809963,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024945,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.015810153729833434,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154614,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.01534909100222535,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685757007,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183587,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345696,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653893,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614751,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406088,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578247,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235254,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.015786868759359,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452373,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651537,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434935,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.01581309754773099,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397243,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244064,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024987,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183589,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890132,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244026,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565873,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685757,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797585,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.312,
						"acc_stderr,none": 0.014658474370509005,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2939043343118633,
						"acc_norm,none": 0.2939043343118633,
						"acc_norm_stderr,none": 0.05578855423918061,
						"acc_stderr,none": 0.05578855423918061,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.03456905430376245,
						"acc_stderr,none": 0.03456905430376245,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.28378378378378377,
						"acc_norm,none": 0.28378378378378377,
						"acc_norm_stderr,none": 0.03718409321285373,
						"acc_stderr,none": 0.03718409321285373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.038746956666858325,
						"acc_stderr,none": 0.038746956666858325,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624335,
						"acc_stderr,none": 0.03546563019624335,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745933,
						"acc_stderr,none": 0.03283906353745933,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.23125,
						"acc_norm_stderr,none": 0.033437582657277434,
						"acc_stderr,none": 0.033437582657277434,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.04010358942462202,
						"acc_stderr,none": 0.04010358942462202,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288876,
						"acc_stderr,none": 0.03976333292288876,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820285,
						"acc_stderr,none": 0.045223500773820285,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.026038038744338663,
						"acc_stderr,none": 0.026038038744338663,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.03421843754304871,
						"acc_stderr,none": 0.03421843754304871,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.029312814153955934,
						"acc_stderr,none": 0.029312814153955934,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.18867924528301888,
						"acc_norm,none": 0.18867924528301888,
						"acc_norm_stderr,none": 0.0381824426969915,
						"acc_stderr,none": 0.0381824426969915,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.04794743635189597,
						"acc_stderr,none": 0.04794743635189597,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852894,
						"acc_stderr,none": 0.04730439022852894,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.038935425188248475,
						"acc_stderr,none": 0.038935425188248475,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604901,
						"acc_stderr,none": 0.04176466758604901,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2454212454212454,
						"acc_norm,none": 0.2454212454212454,
						"acc_norm_stderr,none": 0.02609299388422865,
						"acc_stderr,none": 0.02609299388422865,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.039347351125471115,
						"acc_stderr,none": 0.039347351125471115,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.34591194968553457,
						"acc_norm,none": 0.34591194968553457,
						"acc_norm_stderr,none": 0.037841848841408295,
						"acc_stderr,none": 0.037841848841408295,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.036803503712864616,
						"acc_stderr,none": 0.036803503712864616,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932022,
						"acc_stderr,none": 0.030532892233932022,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.41596638655462187,
						"acc_norm,none": 0.41596638655462187,
						"acc_norm_stderr,none": 0.03201650100739614,
						"acc_stderr,none": 0.03201650100739614,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933952,
						"acc_stderr,none": 0.028187385293933952,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.03897077881510411,
						"acc_stderr,none": 0.03897077881510411,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03366618544627456,
						"acc_stderr,none": 0.03366618544627456,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.30201342281879195,
						"acc_norm,none": 0.30201342281879195,
						"acc_norm_stderr,none": 0.03774033930941344,
						"acc_stderr,none": 0.03774033930941344,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410717,
						"acc_stderr,none": 0.03661433360410717,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2796610169491525,
						"acc_norm,none": 0.2796610169491525,
						"acc_norm_stderr,none": 0.04149459161011112,
						"acc_stderr,none": 0.04149459161011112,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.03895091015724138,
						"acc_stderr,none": 0.03895091015724138,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0404061017820884,
						"acc_stderr,none": 0.0404061017820884,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3430232558139535,
						"acc_norm,none": 0.3430232558139535,
						"acc_norm_stderr,none": 0.03630268317574837,
						"acc_stderr,none": 0.03630268317574837,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.021927461972871154,
						"acc_stderr,none": 0.021927461972871154,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3598130841121495,
						"acc_norm,none": 0.3598130841121495,
						"acc_norm_stderr,none": 0.03288531991318827,
						"acc_stderr,none": 0.03288531991318827,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208568,
						"acc_stderr,none": 0.04119323030208568,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.36065573770491804,
						"acc_norm,none": 0.36065573770491804,
						"acc_norm_stderr,none": 0.043653706455668594,
						"acc_stderr,none": 0.043653706455668594,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630124,
						"acc_stderr,none": 0.03260773253630124,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.03476890096393038,
						"acc_stderr,none": 0.03476890096393038,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003337,
						"acc_stderr,none": 0.03752833958003337,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04622501635210239,
						"acc_stderr,none": 0.04622501635210239,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843724,
						"acc_stderr,none": 0.034645078898843724,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.0302936456617428,
						"acc_stderr,none": 0.0302936456617428,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.02243941258278639,
						"acc_stderr,none": 0.02243941258278639,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.36637931034482757,
						"acc_norm,none": 0.36637931034482757,
						"acc_norm_stderr,none": 0.031701087100596985,
						"acc_stderr,none": 0.031701087100596985,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.036539236154659684,
						"acc_stderr,none": 0.036539236154659684,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3230088495575221,
						"acc_norm,none": 0.3230088495575221,
						"acc_norm_stderr,none": 0.031175070714705388,
						"acc_stderr,none": 0.031175070714705388,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511782,
						"acc_stderr,none": 0.03524390844511782,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.03658142543288738,
						"acc_stderr,none": 0.03658142543288738,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.0348937065201876,
						"acc_stderr,none": 0.0348937065201876,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896308,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5442472606002858,
						"acc_stderr,none": 0.01296369391004516,
						"alias": "glue",
						"f1,none": 0.6674123884194917,
						"f1_stderr,none": 0.0002603236689035392,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5253933479386577,
						"acc_norm,none": 0.7091216889065923,
						"acc_norm_stderr,none": 0.00453239311124869,
						"acc_stderr,none": 0.004983342213776259,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7089074325635553,
						"acc_stderr,none": 0.01604450406612138,
						"alias": "lambada",
						"perplexity,none": 3.8546991028590867,
						"perplexity_stderr,none": 0.2469929123955875
					},
					"lambada_multilingual": {
						"acc,none": 0.5358043857946827,
						"acc_stderr,none": 0.08162709055795155,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.032834691858042,
						"perplexity_stderr,none": 8.069924621452621
					},
					"lambada_openai": {
						"acc,none": 0.7389869978653212,
						"acc_stderr,none": 0.00611873356162559,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3877282159499233,
						"perplexity_stderr,none": 0.06654038603061653
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4183970502619833,
						"acc_stderr,none": 0.006872578040273994,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.733869191804146,
						"perplexity_stderr,none": 1.9296550099544714
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7397632447118183,
						"acc_stderr,none": 0.0061128362944814826,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.386204559033709,
						"perplexity_stderr,none": 0.06648195751452507
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4539103434892296,
						"acc_stderr,none": 0.006936319475444719,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.549749360342716,
						"perplexity_stderr,none": 1.3920194695098518
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.551717446147875,
						"acc_stderr,none": 0.00692861373079625,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.619271766882466,
						"perplexity_stderr,none": 0.8042465423883304
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5152338443625073,
						"acc_stderr,none": 0.006962743717451539,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.875078581227154,
						"perplexity_stderr,none": 1.1529899993395805
					},
					"lambada_standard": {
						"acc,none": 0.6794100523966622,
						"acc_stderr,none": 0.006502090459040088,
						"alias": " - lambada_standard",
						"perplexity,none": 4.321462520717013,
						"perplexity_stderr,none": 0.09297548546485479
					},
					"logiqa": {
						"acc,none": 0.25960061443932414,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399504,
						"acc_stderr,none": 0.01719607000818003,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3159806295399516,
						"acc_stderr,none": 0.06084916761631225,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932268,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.03999262876617721,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.39622641509433965,
						"acc_stderr,none": 0.030102793781791197,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.04023382273617747,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3148936170212766,
						"acc_stderr,none": 0.030363582197238174,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003336,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.021411684393694185,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04040610178208839,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.36774193548387096,
						"acc_stderr,none": 0.027430866579973467,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114482,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.40606060606060607,
						"acc_stderr,none": 0.03834816355401181,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4494949494949495,
						"acc_stderr,none": 0.0354413249194797,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45595854922279794,
						"acc_stderr,none": 0.03594413711272438,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2948717948717949,
						"acc_stderr,none": 0.023119362758232297,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.025928876132766097,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.24789915966386555,
						"acc_stderr,none": 0.028047967224176892,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3614678899082569,
						"acc_stderr,none": 0.020598082009937378,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1712962962962963,
						"acc_stderr,none": 0.025695341643824688,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4215686274509804,
						"acc_stderr,none": 0.03465868196380757,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.379746835443038,
						"acc_stderr,none": 0.03159188752965852,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3991031390134529,
						"acc_stderr,none": 0.03286745312567961,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3015940488841658,
						"acc_stderr,none": 0.05053326614922218,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.043207678075366705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497752,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2822085889570552,
						"acc_stderr,none": 0.03536117886664742,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.04865777570410769,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.031937057262002924,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.04960449637488584,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4112388250319285,
						"acc_stderr,none": 0.017595971908056573,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3179190751445087,
						"acc_stderr,none": 0.025070713719153172,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.026992544339297236,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3678789829417444,
						"acc_stderr,none": 0.049239431873684125,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3247588424437299,
						"acc_stderr,none": 0.026596782287697043,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.33024691358024694,
						"acc_stderr,none": 0.026168298456732846,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.0257700156442904,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2692307692307692,
						"acc_stderr,none": 0.011328734403140325,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.39338235294117646,
						"acc_stderr,none": 0.02967428828131118,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3022875816993464,
						"acc_stderr,none": 0.018579232711113877,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22448979591836735,
						"acc_stderr,none": 0.02671143055553842,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.333441663958401,
						"acc_stderr,none": 0.06443904150382021,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4427860696517413,
						"acc_stderr,none": 0.03512310964123937,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26926736441484306,
						"acc_stderr,none": 0.05738163666360001,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.036293353299478595,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4269005847953216,
						"acc_stderr,none": 0.03793620616529917,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.34467651553744266,
						"acc_stderr,none": 0.004797452528332527,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34479251423921886,
						"acc_stderr,none": 0.0047936849878533055,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7475490196078431,
						"acc_stderr,none": 0.02153332842706632,
						"alias": " - mrpc",
						"f1,none": 0.8393135725429017,
						"f1_stderr,none": 0.015620712370912481
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.07174515235457064,
						"exact_match_stderr,remove_whitespace": 0.0042957248192786925
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.41,
						"acc_norm_stderr,none": 0.02201748257812767,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4355,
						"acc_stderr,none": 0.011089696374691104,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.010833775211931946,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404662,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.011128198119942877,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.011120131683767742,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.011173268141438293,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.517,
						"acc_stderr,none": 0.011176670299310671,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4794285714285714,
						"acc_stderr,none": 0.05610864155487587,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7731229597388466,
						"acc_norm,none": 0.780195865070729,
						"acc_norm_stderr,none": 0.00966195861665177,
						"acc_stderr,none": 0.009771584259215146,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7375032561682089,
						"acc_norm,none": 0.6116335336948645,
						"acc_norm_stderr,none": 0.010205420180856748,
						"acc_stderr,none": 0.15083398428974418,
						"alias": "pythia",
						"bits_per_byte,none": 0.6337843033903401,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516297078300858,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3877282159499233,
						"perplexity_stderr,none": 0.06654038603061653,
						"word_perplexity,none": 10.476829420620149,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.499725425590335,
						"acc_stderr,none": 0.006765409531672773,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6366806826613901,
						"acc_stderr,none": 0.002391985715094573,
						"alias": " - qqp",
						"f1,none": 0.6659237189838295,
						"f1_stderr,none": 0.0025995851075444553
					},
					"record": {
						"alias": "record",
						"em,none": 0.2712,
						"em_stderr,none": 0.004446013124505282,
						"f1,none": 0.28122857168018817,
						"f1_stderr,none": 0.0044565614985110046
					},
					"rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.029032524428023704,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.947,
						"acc_norm,none": 0.923,
						"acc_norm_stderr,none": 0.008434580140240646,
						"acc_stderr,none": 0.007088105617246446,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9208715596330275,
						"acc_stderr,none": 0.009146538264185714,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3170372472766921,
						"acc_stderr,none": 0.0015588211783750144,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.01627228795791693,
						"bleu_diff,none": -7.567501136333758,
						"bleu_diff_stderr,none": 0.905469643847422,
						"bleu_max,none": 27.268726220046656,
						"bleu_max_stderr,none": 0.8186731988292876,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237263,
						"rouge1_diff,none": -10.454010940513674,
						"rouge1_diff_stderr,none": 0.9747078133897068,
						"rouge1_max,none": 51.82220985146748,
						"rouge1_max_stderr,none": 0.8979132328141808,
						"rouge2_acc,none": 0.24479804161566707,
						"rouge2_acc_stderr,none": 0.015051869486715014,
						"rouge2_diff,none": -12.457646353430821,
						"rouge2_diff_stderr,none": 1.1786180798188124,
						"rouge2_max,none": 35.603822044818465,
						"rouge2_max_stderr,none": 1.0564385410995856,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842883,
						"rougeL_diff,none": -10.679594014036027,
						"rougeL_diff_stderr,none": 0.9887547285344408,
						"rougeL_max,none": 49.13339680535411,
						"rougeL_max_stderr,none": 0.9179911867448477
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3157894736842105,
						"bleu_acc_stderr,none": 0.01627228795791693,
						"bleu_diff,none": -7.567501136333758,
						"bleu_diff_stderr,none": 0.905469643847422,
						"bleu_max,none": 27.268726220046656,
						"bleu_max_stderr,none": 0.8186731988292876,
						"rouge1_acc,none": 0.2668298653610771,
						"rouge1_acc_stderr,none": 0.015483691939237263,
						"rouge1_diff,none": -10.454010940513674,
						"rouge1_diff_stderr,none": 0.9747078133897068,
						"rouge1_max,none": 51.82220985146748,
						"rouge1_max_stderr,none": 0.8979132328141808,
						"rouge2_acc,none": 0.24479804161566707,
						"rouge2_acc_stderr,none": 0.015051869486715014,
						"rouge2_diff,none": -12.457646353430821,
						"rouge2_diff_stderr,none": 1.1786180798188124,
						"rouge2_max,none": 35.603822044818465,
						"rouge2_max_stderr,none": 1.0564385410995856,
						"rougeL_acc,none": 0.27050183598531213,
						"rougeL_acc_stderr,none": 0.015550778332842883,
						"rougeL_diff,none": -10.679594014036027,
						"rougeL_diff_stderr,none": 0.9887547285344408,
						"rougeL_max,none": 49.13339680535411,
						"rougeL_max_stderr,none": 0.9179911867448477
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24357405140758873,
						"acc_stderr,none": 0.01502635482491078,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39050044314579546,
						"acc_stderr,none": 0.013857628797468121,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6337843033903401,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5516297078300858,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.476829420620149,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6795580110497238,
						"acc_stderr,none": 0.013115085457681705,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4084507042253521,
						"acc_stderr,none": 0.058751136942575256,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.07152267812287266,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.019874354831287497,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.020395095484936607,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.02059164957122493,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43576974564926374,
						"acc_stderr,none": 0.050185786700428864,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.00946022348499647,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47309236947791167,
						"acc_stderr,none": 0.010007549970702514,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4855421686746988,
						"acc_stderr,none": 0.010017882185606005,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.009783558109997096,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5273092369477912,
						"acc_stderr,none": 0.010007112889731993,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4983935742971888,
						"acc_stderr,none": 0.01002202114110211,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.493574297188755,
						"acc_stderr,none": 0.010021245217159393,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43413654618473896,
						"acc_stderr,none": 0.009934740969162527,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4855421686746988,
						"acc_stderr,none": 0.010017882185606007,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010004,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.009841918156163159,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45140562248995986,
						"acc_stderr,none": 0.00997462804772198,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41124497991967873,
						"acc_stderr,none": 0.00986291222354464,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41044176706827307,
						"acc_stderr,none": 0.00985999467258512,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.00951233331947037,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6303471511942723,
						"acc_stderr,none": 0.05318402595248758,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927914,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7796161482461945,
						"acc_stderr,none": 0.010666988429058747,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.011627856346940623,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5638649900727994,
						"acc_stderr,none": 0.012761730431435763,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.01260576407762715,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.01216297499613638,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189275,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6803441429516877,
						"acc_stderr,none": 0.012000993063297275,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5896757114493713,
						"acc_stderr,none": 0.012658485800663402,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6379880873593646,
						"acc_stderr,none": 0.01236742376945643,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8134412227466846,
						"acc_stderr,none": 0.0357797384536733,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.00698346355150456,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7570385818561001,
						"acc_stderr,none": 0.013856224434217376,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7718631178707225,
						"acc_stderr,none": 0.02592490955924428,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6698412698412698,
						"acc_stderr,none": 0.0265388756462877,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7857142857142857,
						"acc_stderr,none": 0.01829552775577619,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk-1-8_pth"
	},
	"./rwkv-x-dev/r3-testchunk-blink_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6104847801578354,
						"acc_norm_stderr,none": 0.0919981175048155,
						"acc_stderr,none": 0.10972794100063055,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.015247578786769772,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8255373134328359,
						"acc_stderr,none": 0.15585788738158765,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.29088240372992574,
						"acc_norm,none": 0.29088240372992574,
						"acc_norm_stderr,none": 0.053847106660769854,
						"acc_stderr,none": 0.053847106660769854,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5325899237732253,
						"acc_stderr,none": 0.012725141206705074,
						"alias": "glue",
						"f1,none": 0.6597272024892974,
						"f1_stderr,none": 0.00029291856934484116,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7101688336891131,
						"acc_stderr,none": 0.016802234569798787,
						"alias": "lambada",
						"perplexity,none": 3.8866065452000655,
						"perplexity_stderr,none": 0.25118906168108124
					},
					"lambada_multilingual": {
						"acc,none": 0.536541820298855,
						"acc_stderr,none": 0.0849911835914866,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.133068390479092,
						"perplexity_stderr,none": 8.259890308339713
					},
					"mmlu": {
						"acc,none": 0.3046574562028201,
						"acc_stderr,none": 0.05903992247693278,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.29436769394261425,
						"acc_stderr,none": 0.0528372493870733,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3601544898616029,
						"acc_stderr,none": 0.04733464951680367,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.31199220019499513,
						"acc_stderr,none": 0.053246591909246396,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2581668252457976,
						"acc_stderr,none": 0.05829306180032575,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48264285714285715,
						"acc_stderr,none": 0.05678667549325263,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7300717281337995,
						"acc_norm,none": 0.614953662564991,
						"acc_norm_stderr,none": 0.010395090319449005,
						"acc_stderr,none": 0.1471870563603244,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339128632187685,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517679810835663,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.409822711813673,
						"perplexity_stderr,none": 0.06710860177590683,
						"word_perplexity,none": 10.481822987614866,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3164911544193211,
						"acc_stderr,none": 0.0015848621836507032,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.01633917037328091,
						"bleu_diff,none": -7.74965621112687,
						"bleu_diff_stderr,none": 0.8908333788117159,
						"bleu_max,none": 27.643256907666103,
						"bleu_max_stderr,none": 0.8187162641684086,
						"rouge1_acc,none": 0.28151774785801714,
						"rouge1_acc_stderr,none": 0.01574402724825605,
						"rouge1_diff,none": -9.621757457397658,
						"rouge1_diff_stderr,none": 0.9524042925993831,
						"rouge1_max,none": 52.70388055745965,
						"rouge1_max_stderr,none": 0.8835958770244925,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485083,
						"rouge2_diff,none": -11.717572069837749,
						"rouge2_diff_stderr,none": 1.1578435886750256,
						"rouge2_max,none": 36.58364933117119,
						"rouge2_max_stderr,none": 1.0455110530341567,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -10.00717961692408,
						"rougeL_diff_stderr,none": 0.9684057908961523,
						"rougeL_max,none": 49.93512143971857,
						"rougeL_max_stderr,none": 0.9035769686215327
					},
					"xcopa": {
						"acc,none": 0.6205454545454545,
						"acc_stderr,none": 0.06678889272508182,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4352342704149933,
						"acc_stderr,none": 0.04719083714111022,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6286625353468503,
						"acc_stderr,none": 0.06173884837715778,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8109687570240504,
						"acc_stderr,none": 0.03731162075503134,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6104847801578354,
						"acc_norm_stderr,none": 0.0919981175048155,
						"acc_stderr,none": 0.10972794100063055,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34875,
						"acc_stderr,none": 0.015247578786769772,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407559,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.015060472031706622,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3425,
						"acc_stderr,none": 0.013704669762934725,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3924914675767918,
						"acc_norm,none": 0.41638225255972694,
						"acc_norm_stderr,none": 0.014405618279436176,
						"acc_stderr,none": 0.014269634635670698,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7390572390572391,
						"acc_norm,none": 0.7062289562289562,
						"acc_norm_stderr,none": 0.009346423298166727,
						"acc_stderr,none": 0.009011142493235974,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8255373134328359,
						"acc_stderr,none": 0.15585788738158765,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329822,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731979,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662742,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804478,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.01569721001969469,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499327,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378206,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689103,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.00791034598317755,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817151,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118728,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602496,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919303,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178334,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756986,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342761,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.014174516461485254,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881421,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937823,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.00396985639031942,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456736,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248111,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366667,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307992,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.013334797216936428,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244083,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.01005510343582333,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632896,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.446,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.627,
						"acc_stderr,none": 0.015300493622922809,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620663,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425672,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103287,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103315,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281569,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.0069604200625714005,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499345,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.015784807891138782,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426114,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785133,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565734,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729463,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.475,
						"acc_stderr,none": 0.01579951342999602,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823335,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315151,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.015537226438634593,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946099,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397243,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.01255952792670739,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.01162816469672718,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919291,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118585,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178369,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695473,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.015222868840522022,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.321,
						"acc_stderr,none": 0.014770821817934645,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.29088240372992574,
						"acc_norm,none": 0.29088240372992574,
						"acc_norm_stderr,none": 0.053847106660769854,
						"acc_stderr,none": 0.053847106660769854,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620372,
						"acc_stderr,none": 0.03794062549620372,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395385,
						"acc_stderr,none": 0.03242041613395385,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.3625,
						"acc_norm_stderr,none": 0.0381237434064489,
						"acc_stderr,none": 0.0381237434064489,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624335,
						"acc_stderr,none": 0.03546563019624335,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.03260698244181308,
						"acc_stderr,none": 0.03260698244181308,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.29770992366412213,
						"acc_norm,none": 0.29770992366412213,
						"acc_norm_stderr,none": 0.040103589424622034,
						"acc_stderr,none": 0.040103589424622034,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3219814241486068,
						"acc_norm,none": 0.3219814241486068,
						"acc_norm_stderr,none": 0.02603803874433866,
						"acc_stderr,none": 0.02603803874433866,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3128491620111732,
						"acc_norm,none": 0.3128491620111732,
						"acc_norm_stderr,none": 0.034752298755111065,
						"acc_stderr,none": 0.034752298755111065,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.0283046579430353,
						"acc_stderr,none": 0.0283046579430353,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16981132075471697,
						"acc_norm,none": 0.16981132075471697,
						"acc_norm_stderr,none": 0.036641823111517896,
						"acc_stderr,none": 0.036641823111517896,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.041429720078003766,
						"acc_stderr,none": 0.041429720078003766,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.26373626373626374,
						"acc_norm,none": 0.26373626373626374,
						"acc_norm_stderr,none": 0.02671881407296754,
						"acc_stderr,none": 0.02671881407296754,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.033773102522091945,
						"acc_stderr,none": 0.033773102522091945,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.03934735112547112,
						"acc_stderr,none": 0.03934735112547112,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3067484662576687,
						"acc_norm,none": 0.3067484662576687,
						"acc_norm_stderr,none": 0.03623089915724147,
						"acc_stderr,none": 0.03623089915724147,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.028271399816988545,
						"acc_stderr,none": 0.028271399816988545,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365897,
						"acc_stderr,none": 0.030954055470365897,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.39915966386554624,
						"acc_norm,none": 0.39915966386554624,
						"acc_norm_stderr,none": 0.03181110032413926,
						"acc_stderr,none": 0.03181110032413926,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24782608695652175,
						"acc_norm,none": 0.24782608695652175,
						"acc_norm_stderr,none": 0.028530862595410073,
						"acc_stderr,none": 0.028530862595410073,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.0394188850126319,
						"acc_stderr,none": 0.0394188850126319,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2751677852348993,
						"acc_norm,none": 0.2751677852348993,
						"acc_norm_stderr,none": 0.036710194033425625,
						"acc_stderr,none": 0.036710194033425625,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.033071627503231775,
						"acc_stderr,none": 0.033071627503231775,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049809,
						"acc_stderr,none": 0.04187011593049809,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665338,
						"acc_stderr,none": 0.03334645408665338,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.040139645540727735,
						"acc_stderr,none": 0.040139645540727735,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235173,
						"acc_stderr,none": 0.03970158273235173,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.033695536918777184,
						"acc_stderr,none": 0.033695536918777184,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.035665455380848116,
						"acc_stderr,none": 0.035665455380848116,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995086,
						"acc_stderr,none": 0.022050254355995086,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3598130841121495,
						"acc_norm,none": 0.3598130841121495,
						"acc_norm_stderr,none": 0.03288531991318828,
						"acc_stderr,none": 0.03288531991318828,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3252032520325203,
						"acc_norm,none": 0.3252032520325203,
						"acc_norm_stderr,none": 0.042411537335732975,
						"acc_stderr,none": 0.042411537335732975,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3524590163934426,
						"acc_norm,none": 0.3524590163934426,
						"acc_norm_stderr,none": 0.0434305428342706,
						"acc_stderr,none": 0.0434305428342706,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962466,
						"acc_stderr,none": 0.03224133248962466,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.03508485373860692,
						"acc_stderr,none": 0.03508485373860692,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.03394921616447879,
						"acc_stderr,none": 0.03394921616447879,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649575,
						"acc_stderr,none": 0.04083221538649575,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.038552896163789485,
						"acc_stderr,none": 0.038552896163789485,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3577586206896552,
						"acc_norm,none": 0.3577586206896552,
						"acc_norm_stderr,none": 0.0315382945960225,
						"acc_stderr,none": 0.0315382945960225,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268047,
						"acc_stderr,none": 0.03567969772268047,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896308,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5325899237732253,
						"acc_stderr,none": 0.012725141206705074,
						"alias": "glue",
						"f1,none": 0.6597272024892974,
						"f1_stderr,none": 0.00029291856934484116,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5259908384783908,
						"acc_norm,none": 0.708922525393348,
						"acc_norm_stderr,none": 0.004533307758521346,
						"acc_stderr,none": 0.004983035420235716,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7101688336891131,
						"acc_stderr,none": 0.016802234569798787,
						"alias": "lambada",
						"perplexity,none": 3.8866065452000655,
						"perplexity_stderr,none": 0.25118906168108124
					},
					"lambada_multilingual": {
						"acc,none": 0.536541820298855,
						"acc_stderr,none": 0.0849911835914866,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.133068390479092,
						"perplexity_stderr,none": 8.259890308339713
					},
					"lambada_openai": {
						"acc,none": 0.7407335532699398,
						"acc_stderr,none": 0.006105429762071469,
						"alias": " - lambada_openai",
						"perplexity,none": 3.409822711813673,
						"perplexity_stderr,none": 0.06710860177590683
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.41956142053172907,
						"acc_stderr,none": 0.0068752418779180514,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.98610008504293,
						"perplexity_stderr,none": 1.934866280206218
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7413157384048127,
						"acc_stderr,none": 0.006100967149142441,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.412073810064842,
						"perplexity_stderr,none": 0.06724170581992635
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45701533087521834,
						"acc_stderr,none": 0.006940188097931741,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 28.408947617297116,
						"perplexity_stderr,none": 1.3907402174724526
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5495827673200078,
						"acc_stderr,none": 0.006931642009240902,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.76975720394861,
						"perplexity_stderr,none": 0.8120888323472185
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5152338443625073,
						"acc_stderr,none": 0.006962743717451542,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.08846323604198,
						"perplexity_stderr,none": 1.1658353063802551
					},
					"lambada_standard": {
						"acc,none": 0.6790219289734135,
						"acc_stderr,none": 0.006504166559764681,
						"alias": " - lambada_standard",
						"perplexity,none": 4.361566274329099,
						"perplexity_stderr,none": 0.09417215942086928
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971925,
						"acc_stderr,none": 0.017162894755127063,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3046574562028201,
						"acc_stderr,none": 0.05903992247693278,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04072314811876837,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.029773082713319875,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3583815028901734,
						"acc_stderr,none": 0.036563436533531585,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.0379328118530781,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2978723404255319,
						"acc_stderr,none": 0.029896145682095455,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518753,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.20634920634920634,
						"acc_stderr,none": 0.02084229093011466,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.040406101782088394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.35161290322580646,
						"acc_stderr,none": 0.027162537826948458,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2019704433497537,
						"acc_stderr,none": 0.02824735012218027,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.41818181818181815,
						"acc_stderr,none": 0.03851716319398393,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3686868686868687,
						"acc_stderr,none": 0.034373055019806184,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.37305699481865284,
						"acc_stderr,none": 0.03490205592048574,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28974358974358977,
						"acc_stderr,none": 0.023000628243687968,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.21851851851851853,
						"acc_stderr,none": 0.025195752251823793,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.226890756302521,
						"acc_stderr,none": 0.027205371538279483,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2251655629139073,
						"acc_stderr,none": 0.03410435282008936,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.326605504587156,
						"acc_stderr,none": 0.0201069908899373,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.025416428388767478,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4264705882352941,
						"acc_stderr,none": 0.034711579079534254,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3628691983122363,
						"acc_stderr,none": 0.031299208255302136,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.40458015267175573,
						"acc_stderr,none": 0.043046937953806645,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.29436769394261425,
						"acc_stderr,none": 0.0528372493870733,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.043207678075366705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.043300437496507437,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22699386503067484,
						"acc_stderr,none": 0.032910995786157686,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915185,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4174757281553398,
						"acc_stderr,none": 0.048828405482122375,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.39316239316239315,
						"acc_stderr,none": 0.03199957924651047,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4163473818646232,
						"acc_stderr,none": 0.017627948030430298,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3208092485549133,
						"acc_stderr,none": 0.025131000233647907,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.026336613469046633,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3601544898616029,
						"acc_stderr,none": 0.04733464951680367,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2958199356913183,
						"acc_stderr,none": 0.025922371788818788,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02622964917882116,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290385,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.26010430247718386,
						"acc_stderr,none": 0.011204382887823822,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.02909720956841195,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.29248366013071897,
						"acc_stderr,none": 0.018403415710109786,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22857142857142856,
						"acc_stderr,none": 0.02688214492230774,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.31199220019499513,
						"acc_stderr,none": 0.053246591909246396,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.40298507462686567,
						"acc_stderr,none": 0.034683432951111266,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2581668252457976,
						"acc_stderr,none": 0.05829306180032575,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.0381107966983353,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3325522159959246,
						"acc_stderr,none": 0.004755711973595552,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.33390968266883647,
						"acc_stderr,none": 0.004756442145845232,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7524509803921569,
						"acc_stderr,none": 0.02139304018372111,
						"alias": " - mrpc",
						"f1,none": 0.8424336973478939,
						"f1_stderr,none": 0.015487371449260464
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.07867036011080332,
						"exact_match_stderr,remove_whitespace": 0.004481461291101448
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.022049497969827865,
						"acc_stderr,none": 0.02051442622562804,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4465,
						"acc_stderr,none": 0.011118933867290117,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3615,
						"acc_stderr,none": 0.0107455389955159,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.423,
						"acc_stderr,none": 0.011049730687855397,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.011112774040420288,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5285,
						"acc_stderr,none": 0.011164954236428794,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5145,
						"acc_stderr,none": 0.011178432523249468,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48264285714285715,
						"acc_stderr,none": 0.05678667549325263,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7693144722524483,
						"acc_norm,none": 0.7752992383025027,
						"acc_norm_stderr,none": 0.009738282586548373,
						"acc_stderr,none": 0.009828959550983089,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7300717281337995,
						"acc_norm,none": 0.614953662564991,
						"acc_norm_stderr,none": 0.010395090319449005,
						"acc_stderr,none": 0.1471870563603244,
						"alias": "pythia",
						"bits_per_byte,none": 0.6339128632187685,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517679810835663,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.409822711813673,
						"perplexity_stderr,none": 0.06710860177590683,
						"word_perplexity,none": 10.481822987614866,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5015559216547685,
						"acc_stderr,none": 0.006765377795038136,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6229532525352461,
						"acc_stderr,none": 0.0024103428890995004,
						"alias": " - qqp",
						"f1,none": 0.6581449587369932,
						"f1_stderr,none": 0.0026024754097340565
					},
					"record": {
						"alias": "record",
						"em,none": 0.2757,
						"em_stderr,none": 0.004468886672853911,
						"f1,none": 0.2856752380952377,
						"f1_stderr,none": 0.0044784358652582715
					},
					"rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.029032524428023707,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.915,
						"acc_norm_stderr,none": 0.008823426366942277,
						"acc_stderr,none": 0.007274401481697052,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9025229357798165,
						"acc_stderr,none": 0.010050120445908327,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3164911544193211,
						"acc_stderr,none": 0.0015848621836507032,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.01633917037328091,
						"bleu_diff,none": -7.74965621112687,
						"bleu_diff_stderr,none": 0.8908333788117159,
						"bleu_max,none": 27.643256907666103,
						"bleu_max_stderr,none": 0.8187162641684086,
						"rouge1_acc,none": 0.28151774785801714,
						"rouge1_acc_stderr,none": 0.01574402724825605,
						"rouge1_diff,none": -9.621757457397658,
						"rouge1_diff_stderr,none": 0.9524042925993831,
						"rouge1_max,none": 52.70388055745965,
						"rouge1_max_stderr,none": 0.8835958770244925,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485083,
						"rouge2_diff,none": -11.717572069837749,
						"rouge2_diff_stderr,none": 1.1578435886750256,
						"rouge2_max,none": 36.58364933117119,
						"rouge2_max_stderr,none": 1.0455110530341567,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -10.00717961692408,
						"rougeL_diff_stderr,none": 0.9684057908961523,
						"rougeL_max,none": 49.93512143971857,
						"rougeL_max_stderr,none": 0.9035769686215327
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.01633917037328091,
						"bleu_diff,none": -7.74965621112687,
						"bleu_diff_stderr,none": 0.8908333788117159,
						"bleu_max,none": 27.643256907666103,
						"bleu_max_stderr,none": 0.8187162641684086,
						"rouge1_acc,none": 0.28151774785801714,
						"rouge1_acc_stderr,none": 0.01574402724825605,
						"rouge1_diff,none": -9.621757457397658,
						"rouge1_diff_stderr,none": 0.9524042925993831,
						"rouge1_max,none": 52.70388055745965,
						"rouge1_max_stderr,none": 0.8835958770244925,
						"rouge2_acc,none": 0.25703794369645044,
						"rouge2_acc_stderr,none": 0.015298077509485083,
						"rouge2_diff,none": -11.717572069837749,
						"rouge2_diff_stderr,none": 1.1578435886750256,
						"rouge2_max,none": 36.58364933117119,
						"rouge2_max_stderr,none": 1.0455110530341567,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -10.00717961692408,
						"rougeL_diff_stderr,none": 0.9684057908961523,
						"rougeL_max,none": 49.93512143971857,
						"rougeL_max_stderr,none": 0.9035769686215327
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2423500611995104,
						"acc_stderr,none": 0.015000674373570345,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3906322476391317,
						"acc_stderr,none": 0.01395805563894787,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6339128632187685,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5517679810835663,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.481822987614866,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.013148883320923146,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5961538461538461,
						"acc_stderr,none": 0.04834688952654019,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6205454545454545,
						"acc_stderr,none": 0.06678889272508182,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.022049497969827865,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668086,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.02009995064750323,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.019684688820194723,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843428,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.021487751089720522,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745197,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.02074059653648807,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4352342704149933,
						"acc_stderr,none": 0.04719083714111022,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337349,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.47630522088353416,
						"acc_stderr,none": 0.010010812905412062,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4895582329317269,
						"acc_stderr,none": 0.01001988720567743,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39477911646586344,
						"acc_stderr,none": 0.00979764252169086,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5236947791164659,
						"acc_stderr,none": 0.010010812905412057,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4867469879518072,
						"acc_stderr,none": 0.010018551648218462,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.491566265060241,
						"acc_stderr,none": 0.010020647068114175,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43172690763052207,
						"acc_stderr,none": 0.00992820318611292,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4867469879518072,
						"acc_stderr,none": 0.010018551648218459,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.385140562248996,
						"acc_stderr,none": 0.00975405283095075,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044868,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4433734939759036,
						"acc_stderr,none": 0.009957592660538646,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.00984191815616317,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42329317269076305,
						"acc_stderr,none": 0.009903432138272914,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3461847389558233,
						"acc_stderr,none": 0.009536061379898332,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6286625353468503,
						"acc_stderr,none": 0.06173884837715778,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5903375248180013,
						"acc_stderr,none": 0.01265536903075035,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7802779616148247,
						"acc_stderr,none": 0.010655479709353636,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7107875579086698,
						"acc_stderr,none": 0.011667825388305481,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.01277240869797914,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.600926538716082,
						"acc_stderr,none": 0.012602266005184319,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6637988087359364,
						"acc_stderr,none": 0.012157083081239748,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5473196558570483,
						"acc_stderr,none": 0.012809372866181964,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.685638649900728,
						"acc_stderr,none": 0.011947409363762432,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207792,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5837193911317009,
						"acc_stderr,none": 0.012685473350967527,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6366644606221046,
						"acc_stderr,none": 0.01237715330661327,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8109687570240504,
						"acc_stderr,none": 0.03731162075503134,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8705376344086021,
						"acc_stderr,none": 0.00696381913088228,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.748696558915537,
						"acc_stderr,none": 0.014014234546353827,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.025614420399944934,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.026665559335926015,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7757936507936508,
						"acc_stderr,none": 0.018595723133309875,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk-blink_pth"
	},
	"./rwkv-x-dev/r3-testchunk2-blink-fixed_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6316234498308907,
						"acc_norm,none": 0.629086809470124,
						"acc_norm_stderr,none": 0.09509523096845733,
						"acc_stderr,none": 0.10743555241824408,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355625,
						"acc_stderr,none": 0.020646939335588164,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8372388059701492,
						"acc_stderr,none": 0.1449221080542288,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3074598514936971,
						"acc_norm,none": 0.3074598514936971,
						"acc_norm_stderr,none": 0.060305806736127565,
						"acc_stderr,none": 0.060305806736127565,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5417460695569318,
						"acc_stderr,none": 0.012687135278713072,
						"alias": "glue",
						"f1,none": 0.6661428921456624,
						"f1_stderr,none": 0.00024888756935086706,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7139530370657869,
						"acc_stderr,none": 0.017560031524704353,
						"alias": "lambada",
						"perplexity,none": 3.7950854210483533,
						"perplexity_stderr,none": 0.24227635624684163
					},
					"lambada_multilingual": {
						"acc,none": 0.5407335532699398,
						"acc_stderr,none": 0.0850178574518962,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.0887400654959,
						"perplexity_stderr,none": 8.282259446550961
					},
					"mmlu": {
						"acc,none": 0.32452642073778654,
						"acc_stderr,none": 0.06345102129784701,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.316471838469713,
						"acc_stderr,none": 0.057767621311088105,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.37141937560347593,
						"acc_stderr,none": 0.04462802346907986,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3477413064673383,
						"acc_stderr,none": 0.06185999366852834,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26768157310497936,
						"acc_stderr,none": 0.062348176837475004,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.0469539571997253,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7420063220623327,
						"acc_norm,none": 0.6328976518349324,
						"acc_norm_stderr,none": 0.01103652906788016,
						"acc_stderr,none": 0.13961337884741626,
						"alias": "pythia",
						"bits_per_byte,none": 0.637240885907737,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5553517462145265,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3372083274427577,
						"perplexity_stderr,none": 0.06578484198116316,
						"word_perplexity,none": 10.61192281770562,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3264109411154914,
						"acc_stderr,none": 0.0015063410627324369,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907856,
						"bleu_diff,none": -6.6241054430445185,
						"bleu_diff_stderr,none": 0.872301214916715,
						"bleu_max,none": 28.446557842552323,
						"bleu_max_stderr,none": 0.8209324682851348,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.01596440096558963,
						"rouge1_diff,none": -8.645065759386354,
						"rouge1_diff_stderr,none": 0.9512850928049934,
						"rouge1_max,none": 53.75023202586464,
						"rouge1_max_stderr,none": 0.8682318358000709,
						"rouge2_acc,none": 0.2802937576499388,
						"rouge2_acc_stderr,none": 0.015723139524608777,
						"rouge2_diff,none": -10.715649955491015,
						"rouge2_diff_stderr,none": 1.1540785926450048,
						"rouge2_max,none": 37.87879834839476,
						"rouge2_max_stderr,none": 1.034702579140647,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.025804782525316,
						"rougeL_diff_stderr,none": 0.9670548109618238,
						"rougeL_max,none": 51.02824795749152,
						"rougeL_max_stderr,none": 0.8862874053491098
					},
					"xcopa": {
						"acc,none": 0.6236363636363635,
						"acc_stderr,none": 0.07128179018885027,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.04872268245174795,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6325130858552434,
						"acc_stderr,none": 0.06008813221654891,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8114182962463475,
						"acc_stderr,none": 0.04535494600752044,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6316234498308907,
						"acc_norm,none": 0.629086809470124,
						"acc_norm_stderr,none": 0.09509523096845733,
						"acc_stderr,none": 0.10743555241824408,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.355625,
						"acc_stderr,none": 0.020646939335588164,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.387,
						"acc_stderr,none": 0.015410011955493932,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.341,
						"acc_stderr,none": 0.014998131348402688,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3416666666666667,
						"acc_stderr,none": 0.01369665877800251,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4044368600682594,
						"acc_norm,none": 0.4283276450511945,
						"acc_norm_stderr,none": 0.014460496367599019,
						"acc_stderr,none": 0.01434203648343617,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7436868686868687,
						"acc_norm,none": 0.7281144781144782,
						"acc_norm_stderr,none": 0.009129795867310497,
						"acc_stderr,none": 0.008958775997918354,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8372388059701492,
						"acc_stderr,none": 0.1449221080542288,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745911,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036277,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000001,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541669,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037181,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968769,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.015459721957493377,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425677,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403623,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.0063463592930338335,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.0059721576223896195,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704156,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832013,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910622,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315141,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096928,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920843,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696244,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269192885,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423525,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496114945,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498323,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.01255952792670737,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799464,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139974,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333331,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474937,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621235,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493937,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747384,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.519,
						"acc_stderr,none": 0.015807874268505846,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066543,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361428,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766276,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370145,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118732,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240653,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968109,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.0157926694516289,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813615,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.0088234263669423,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036355,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774168,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499364,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.0050348137353181865,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.637,
						"acc_stderr,none": 0.015213890444671285,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378242,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319332,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992433,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406729,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689099,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685757003,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.015380102325652715,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.342,
						"acc_stderr,none": 0.015008706182121738,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3074598514936971,
						"acc_norm,none": 0.3074598514936971,
						"acc_norm_stderr,none": 0.060305806736127565,
						"acc_stderr,none": 0.060305806736127565,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.35502958579881655,
						"acc_norm,none": 0.35502958579881655,
						"acc_norm_stderr,none": 0.03691879594576913,
						"acc_stderr,none": 0.03691879594576913,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.31756756756756754,
						"acc_norm,none": 0.31756756756756754,
						"acc_norm_stderr,none": 0.03839628734149681,
						"acc_stderr,none": 0.03839628734149681,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.425,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.03920394987159571,
						"acc_stderr,none": 0.03920394987159571,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511784,
						"acc_stderr,none": 0.03524390844511784,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745933,
						"acc_stderr,none": 0.03283906353745933,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.35877862595419846,
						"acc_norm,none": 0.35877862595419846,
						"acc_norm_stderr,none": 0.04206739313864908,
						"acc_stderr,none": 0.04206739313864908,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.040263772107873096,
						"acc_stderr,none": 0.040263772107873096,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.34674922600619196,
						"acc_norm,none": 0.34674922600619196,
						"acc_norm_stderr,none": 0.026522825062324028,
						"acc_stderr,none": 0.026522825062324028,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.31843575418994413,
						"acc_norm,none": 0.31843575418994413,
						"acc_norm_stderr,none": 0.03491839802265681,
						"acc_stderr,none": 0.03491839802265681,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2742616033755274,
						"acc_norm,none": 0.2742616033755274,
						"acc_norm_stderr,none": 0.029041333510598035,
						"acc_stderr,none": 0.029041333510598035,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.037431386312552786,
						"acc_stderr,none": 0.037431386312552786,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.04773249298367361,
						"acc_stderr,none": 0.04773249298367361,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252627,
						"acc_stderr,none": 0.04077494709252627,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936713,
						"acc_stderr,none": 0.04022559246936713,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2673992673992674,
						"acc_norm,none": 0.2673992673992674,
						"acc_norm_stderr,none": 0.026836713439088864,
						"acc_stderr,none": 0.026836713439088864,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.033540924375915195,
						"acc_stderr,none": 0.033540924375915195,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.036155076303109344,
						"acc_stderr,none": 0.036155076303109344,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606756,
						"acc_stderr,none": 0.03558926157606756,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.3237410071942446,
						"acc_norm,none": 0.3237410071942446,
						"acc_norm_stderr,none": 0.039830507521054596,
						"acc_stderr,none": 0.039830507521054596,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.03815152004368298,
						"acc_stderr,none": 0.03815152004368298,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.0356654553808481,
						"acc_stderr,none": 0.0356654553808481,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.028514456573421415,
						"acc_stderr,none": 0.028514456573421415,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.2828282828282828,
						"acc_norm_stderr,none": 0.03208779558786752,
						"acc_stderr,none": 0.03208779558786752,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.39915966386554624,
						"acc_norm,none": 0.39915966386554624,
						"acc_norm_stderr,none": 0.03181110032413926,
						"acc_stderr,none": 0.03181110032413926,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26521739130434785,
						"acc_norm,none": 0.26521739130434785,
						"acc_norm_stderr,none": 0.02917176407847258,
						"acc_stderr,none": 0.02917176407847258,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03999262876617723,
						"acc_stderr,none": 0.03999262876617723,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3356643356643357,
						"acc_norm,none": 0.3356643356643357,
						"acc_norm_stderr,none": 0.03962800523347343,
						"acc_stderr,none": 0.03962800523347343,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28859060402684567,
						"acc_norm,none": 0.28859060402684567,
						"acc_norm_stderr,none": 0.03724517629698768,
						"acc_stderr,none": 0.03724517629698768,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.037036671945524846,
						"acc_stderr,none": 0.037036671945524846,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662572,
						"acc_stderr,none": 0.04289122333662572,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.19090909090909092,
						"acc_norm,none": 0.19090909090909092,
						"acc_norm_stderr,none": 0.03764425585984926,
						"acc_stderr,none": 0.03764425585984926,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.041634530313028585,
						"acc_stderr,none": 0.041634530313028585,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336698,
						"acc_stderr,none": 0.03333068663336698,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.37209302325581395,
						"acc_norm,none": 0.37209302325581395,
						"acc_norm_stderr,none": 0.036963693685536064,
						"acc_stderr,none": 0.036963693685536064,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2895377128953771,
						"acc_norm,none": 0.2895377128953771,
						"acc_norm_stderr,none": 0.022399130302514076,
						"acc_stderr,none": 0.022399130302514076,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.03392112552066969,
						"acc_stderr,none": 0.03392112552066969,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.040490154606224904,
						"acc_stderr,none": 0.040490154606224904,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962465,
						"acc_stderr,none": 0.03224133248962465,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03565038177453911,
						"acc_stderr,none": 0.03565038177453911,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.0346439012574329,
						"acc_stderr,none": 0.0346439012574329,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.03940669168337698,
						"acc_stderr,none": 0.03940669168337698,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.32413793103448274,
						"acc_norm,none": 0.32413793103448274,
						"acc_norm_stderr,none": 0.03900432069185554,
						"acc_stderr,none": 0.03900432069185554,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3904761904761905,
						"acc_norm,none": 0.3904761904761905,
						"acc_norm_stderr,none": 0.047838322981141455,
						"acc_stderr,none": 0.047838322981141455,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2742857142857143,
						"acc_norm,none": 0.2742857142857143,
						"acc_norm_stderr,none": 0.03382281937517295,
						"acc_stderr,none": 0.03382281937517295,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293145,
						"acc_stderr,none": 0.030641194076293145,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.40086206896551724,
						"acc_norm,none": 0.40086206896551724,
						"acc_norm_stderr,none": 0.03224444451161072,
						"acc_stderr,none": 0.03224444451161072,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.037125378336148665,
						"acc_stderr,none": 0.037125378336148665,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493037,
						"acc_stderr,none": 0.030945344741493037,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.34545454545454546,
						"acc_norm,none": 0.34545454545454546,
						"acc_norm_stderr,none": 0.03713158067481913,
						"acc_stderr,none": 0.03713158067481913,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676528,
						"acc_stderr,none": 0.03597530251676528,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.0348937065201876,
						"acc_stderr,none": 0.0348937065201876,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5417460695569318,
						"acc_stderr,none": 0.012687135278713072,
						"alias": "glue",
						"f1,none": 0.6661428921456624,
						"f1_stderr,none": 0.00024888756935086706,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5240987851025692,
						"acc_norm,none": 0.7021509659430393,
						"acc_norm_stderr,none": 0.00456378170737694,
						"acc_stderr,none": 0.004983982396187365,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7139530370657869,
						"acc_stderr,none": 0.017560031524704353,
						"alias": "lambada",
						"perplexity,none": 3.7950854210483533,
						"perplexity_stderr,none": 0.24227635624684163
					},
					"lambada_multilingual": {
						"acc,none": 0.5407335532699398,
						"acc_stderr,none": 0.0850178574518962,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.0887400654959,
						"perplexity_stderr,none": 8.282259446550961
					},
					"lambada_openai": {
						"acc,none": 0.746749466330293,
						"acc_stderr,none": 0.006058634002437436,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3372083274427577,
						"perplexity_stderr,none": 0.06578484198116316
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42771201241994955,
						"acc_stderr,none": 0.006892791023967233,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.356237834253605,
						"perplexity_stderr,none": 1.9037350792182746
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7471375897535416,
						"acc_stderr,none": 0.006055562668610388,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3372066277454686,
						"perplexity_stderr,none": 0.06570186957075035
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4581797011449641,
						"acc_stderr,none": 0.0069415687750082455,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.805106464559913,
						"perplexity_stderr,none": 1.4755459209097186
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5511352610130021,
						"acc_stderr,none": 0.006929452414790846,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.376394980871183,
						"perplexity_stderr,none": 0.7948237160953399
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5195032020182418,
						"acc_stderr,none": 0.006960676273955527,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.56875442004935,
						"perplexity_stderr,none": 1.140380955016065
					},
					"lambada_standard": {
						"acc,none": 0.6811566078012808,
						"acc_stderr,none": 0.006492684061449839,
						"alias": " - lambada_standard",
						"perplexity,none": 4.252738109952723,
						"perplexity_stderr,none": 0.09135204728382146
					},
					"logiqa": {
						"acc,none": 0.25499231950844853,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.01742069478339314,
						"acc_stderr,none": 0.01709571410527983,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.32452642073778654,
						"acc_stderr,none": 0.06345102129784701,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03523807393012047,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4075471698113208,
						"acc_stderr,none": 0.030242233800854494,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.35260115606936415,
						"acc_stderr,none": 0.03643037168958548,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.19607843137254902,
						"acc_stderr,none": 0.03950581861179962,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.02113285918275445,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.040061680838488774,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38064516129032255,
						"acc_stderr,none": 0.027621717832907036,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114475,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.03825460278380026,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.035402943770953675,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.42487046632124353,
						"acc_stderr,none": 0.035674713352125395,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.33076923076923076,
						"acc_stderr,none": 0.023854795680971118,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.02534809746809785,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.25630252100840334,
						"acc_stderr,none": 0.02835962087053395,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.03658603262763743,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3724770642201835,
						"acc_stderr,none": 0.0207283684576385,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.14351851851851852,
						"acc_stderr,none": 0.02391077925264438,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.44607843137254904,
						"acc_stderr,none": 0.03488845451304974,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.39662447257383965,
						"acc_stderr,none": 0.03184399873811224,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.032277904428505,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4198473282442748,
						"acc_stderr,none": 0.043285772152629715,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.316471838469713,
						"acc_stderr,none": 0.057767621311088105,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.35537190082644626,
						"acc_stderr,none": 0.0436923632657398,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3611111111111111,
						"acc_stderr,none": 0.04643454608906274,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3128834355828221,
						"acc_stderr,none": 0.036429145782924055,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4077669902912621,
						"acc_stderr,none": 0.04865777570410769,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3803418803418803,
						"acc_stderr,none": 0.031804252043841,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4227330779054917,
						"acc_stderr,none": 0.017665180351954062,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.025305258131879723,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103986,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3137254901960784,
						"acc_stderr,none": 0.02656892101545715,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.37141937560347593,
						"acc_stderr,none": 0.04462802346907986,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.40514469453376206,
						"acc_stderr,none": 0.02788238379132595,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.026822801759507887,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.02601199293090201,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2816166883963494,
						"acc_stderr,none": 0.011487783272786696,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3897058823529412,
						"acc_stderr,none": 0.0296246635811597,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.31862745098039214,
						"acc_stderr,none": 0.018850084696468702,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22857142857142856,
						"acc_stderr,none": 0.02688214492230774,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3477413064673383,
						"acc_stderr,none": 0.06185999366852834,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4427860696517413,
						"acc_stderr,none": 0.03512310964123937,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26768157310497936,
						"acc_stderr,none": 0.062348176837475004,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.0368078369072758,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.391812865497076,
						"acc_stderr,none": 0.03743979825926401,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3464085583290881,
						"acc_stderr,none": 0.004803131292236264,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34245321399511797,
						"acc_stderr,none": 0.004785916364185231,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": " - mrpc",
						"f1,none": 0.8340943683409436,
						"f1_stderr,none": 0.015665614274445003
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.049307479224376734,
						"exact_match_stderr,remove_whitespace": 0.0036039851913301914
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.022033677993740862,
						"acc_stderr,none": 0.02051442622562805,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4155,
						"acc_stderr,none": 0.011022278362940799,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3795,
						"acc_stderr,none": 0.010853514379554395,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3845,
						"acc_stderr,none": 0.010880674130529195,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886096,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.011168006186472578,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.011178751372184862,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.01117740878887489,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.0469539571997253,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7687704026115343,
						"acc_norm,none": 0.7769314472252449,
						"acc_norm_stderr,none": 0.009713057213018525,
						"acc_stderr,none": 0.00983706318062533,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7420063220623327,
						"acc_norm,none": 0.6328976518349324,
						"acc_norm_stderr,none": 0.01103652906788016,
						"acc_stderr,none": 0.13961337884741626,
						"alias": "pythia",
						"bits_per_byte,none": 0.637240885907737,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5553517462145265,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3372083274427577,
						"perplexity_stderr,none": 0.06578484198116316,
						"word_perplexity,none": 10.61192281770562,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49569833424858134,
						"acc_stderr,none": 0.006765160168388145,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6338609943111551,
						"acc_stderr,none": 0.002395926621295287,
						"alias": " - qqp",
						"f1,none": 0.6646884272997032,
						"f1_stderr,none": 0.0025952897646448207
					},
					"record": {
						"alias": "record",
						"em,none": 0.2726,
						"em_stderr,none": 0.004453190664086395,
						"f1,none": 0.2826552383422852,
						"f1_stderr,none": 0.004463796276450013
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217454,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.952,
						"acc_norm,none": 0.932,
						"acc_norm_stderr,none": 0.007964887911291605,
						"acc_stderr,none": 0.0067632641336666756,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8956422018348624,
						"acc_stderr,none": 0.010359067206812048,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3264109411154914,
						"acc_stderr,none": 0.0015063410627324369,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907856,
						"bleu_diff,none": -6.6241054430445185,
						"bleu_diff_stderr,none": 0.872301214916715,
						"bleu_max,none": 28.446557842552323,
						"bleu_max_stderr,none": 0.8209324682851348,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.01596440096558963,
						"rouge1_diff,none": -8.645065759386354,
						"rouge1_diff_stderr,none": 0.9512850928049934,
						"rouge1_max,none": 53.75023202586464,
						"rouge1_max_stderr,none": 0.8682318358000709,
						"rouge2_acc,none": 0.2802937576499388,
						"rouge2_acc_stderr,none": 0.015723139524608777,
						"rouge2_diff,none": -10.715649955491015,
						"rouge2_diff_stderr,none": 1.1540785926450048,
						"rouge2_max,none": 37.87879834839476,
						"rouge2_max_stderr,none": 1.034702579140647,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.025804782525316,
						"rougeL_diff_stderr,none": 0.9670548109618238,
						"rougeL_max,none": 51.02824795749152,
						"rougeL_max_stderr,none": 0.8862874053491098
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.016403989469907856,
						"bleu_diff,none": -6.6241054430445185,
						"bleu_diff_stderr,none": 0.872301214916715,
						"bleu_max,none": 28.446557842552323,
						"bleu_max_stderr,none": 0.8209324682851348,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.01596440096558963,
						"rouge1_diff,none": -8.645065759386354,
						"rouge1_diff_stderr,none": 0.9512850928049934,
						"rouge1_max,none": 53.75023202586464,
						"rouge1_max_stderr,none": 0.8682318358000709,
						"rouge2_acc,none": 0.2802937576499388,
						"rouge2_acc_stderr,none": 0.015723139524608777,
						"rouge2_diff,none": -10.715649955491015,
						"rouge2_diff_stderr,none": 1.1540785926450048,
						"rouge2_max,none": 37.87879834839476,
						"rouge2_max_stderr,none": 1.034702579140647,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.025804782525316,
						"rougeL_diff_stderr,none": 0.9670548109618238,
						"rougeL_max,none": 51.02824795749152,
						"rougeL_max_stderr,none": 0.8862874053491098
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156494,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.398231918950689,
						"acc_stderr,none": 0.0141306382724401,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.637240885907737,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5553517462145265,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.61192281770562,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6795580110497238,
						"acc_stderr,none": 0.01311508545768171,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5865384615384616,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6236363636363635,
						"acc_stderr,none": 0.07128179018885027,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.021854684955611263,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.01996610354027947,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.01943572728224953,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.0216371979857224,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.020591649571224925,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.04872268245174795,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981031,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4610441767068273,
						"acc_stderr,none": 0.009991608448389063,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829973,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38313253012048193,
						"acc_stderr,none": 0.009744464994287529,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5385542168674698,
						"acc_stderr,none": 0.009992234275993097,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4875502008032129,
						"acc_stderr,none": 0.010018965593055396,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631358,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.00994510647455373,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4718875502008032,
						"acc_stderr,none": 0.010006219242553597,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38393574297188754,
						"acc_stderr,none": 0.009748321202534386,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044873,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4534136546184739,
						"acc_stderr,none": 0.009978476483838964,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505942,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505937,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3526104417670683,
						"acc_stderr,none": 0.009576746271768752,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6325130858552434,
						"acc_stderr,none": 0.06008813221654891,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.600264725347452,
						"acc_stderr,none": 0.012605764077627145,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7776307081403044,
						"acc_stderr,none": 0.010701277694882511,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71343481138319,
						"acc_stderr,none": 0.011635910995502246,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6068828590337525,
						"acc_stderr,none": 0.012569701151957319,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.012151164438163909,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126668,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.685638649900728,
						"acc_stderr,none": 0.01194740936376243,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5612177365982793,
						"acc_stderr,none": 0.012770319186938004,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503923,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6459298477829252,
						"acc_stderr,none": 0.012306905387461634,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8114182962463475,
						"acc_stderr,none": 0.04535494600752044,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8670967741935484,
						"acc_stderr,none": 0.0070417967305786665,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7590361445783133,
						"acc_stderr,none": 0.04722807605987257,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7528675703858185,
						"acc_stderr,none": 0.013936105008393986,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.779467680608365,
						"acc_stderr,none": 0.02561442039994495,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.02647348798089099,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7777777777777778,
						"acc_stderr,none": 0.01853691744855943,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk2-blink-fixed_pth"
	},
	"./rwkv-x-dev/r3-testchunk2-blink_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6257046223224352,
						"acc_norm,none": 0.6276775648252536,
						"acc_norm_stderr,none": 0.09523456861312146,
						"acc_stderr,none": 0.10905805098245354,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.016479670869547956,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8382686567164179,
						"acc_stderr,none": 0.1533746648235865,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.30771887411500615,
						"acc_norm,none": 0.30771887411500615,
						"acc_norm_stderr,none": 0.0598012064733283,
						"acc_stderr,none": 0.0598012064733283,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5637998241896163,
						"acc_stderr,none": 0.11753376065429981,
						"alias": "glue",
						"f1,none": 0.6865177433141125,
						"f1_stderr,none": 0.0001855121553969969,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.712691635940229,
						"acc_stderr,none": 0.01652671311448163,
						"alias": "lambada",
						"perplexity,none": 3.839968568764962,
						"perplexity_stderr,none": 0.2300788160753412
					},
					"lambada_multilingual": {
						"acc,none": 0.5387153114690473,
						"acc_stderr,none": 0.08422127386869964,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.930366545792992,
						"perplexity_stderr,none": 8.674263111850275
					},
					"mmlu": {
						"acc,none": 0.32174903859849024,
						"acc_stderr,none": 0.06141532981276024,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.31073326248671623,
						"acc_stderr,none": 0.05300174579809486,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3678789829417444,
						"acc_stderr,none": 0.04596500725174761,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.338641533961651,
						"acc_stderr,none": 0.05953393064172072,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.276244846178243,
						"acc_stderr,none": 0.06730341822253266,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4677142857142857,
						"acc_stderr,none": 0.055049266309390346,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7416893845983361,
						"acc_norm,none": 0.631319903018174,
						"acc_norm_stderr,none": 0.011021069513552846,
						"acc_stderr,none": 0.14621649859008823,
						"alias": "pythia",
						"bits_per_byte,none": 0.659210083718825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5792177206647482,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.40874022684659,
						"perplexity_stderr,none": 0.07105506637451445,
						"word_perplexity,none": 11.512213784558806,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3420106425967564,
						"acc_stderr,none": 0.0017275455606294446,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446325,
						"bleu_diff,none": -0.04418830843983365,
						"bleu_diff_stderr,none": 0.07498898884520837,
						"bleu_max,none": 0.2407329989634263,
						"bleu_max_stderr,none": 0.05237534035086209,
						"rouge1_acc,none": 0.3671970624235006,
						"rouge1_acc_stderr,none": 0.01687480500145318,
						"rouge1_diff,none": -0.11439673791145699,
						"rouge1_diff_stderr,none": 0.10590911790028439,
						"rouge1_max,none": 1.026800278706878,
						"rouge1_max_stderr,none": 0.1457719553783151,
						"rouge2_acc,none": 0.3157894736842105,
						"rouge2_acc_stderr,none": 0.016272287957916885,
						"rouge2_diff,none": -0.11334875594630169,
						"rouge2_diff_stderr,none": 0.1163014374583124,
						"rouge2_max,none": 0.6262977431506198,
						"rouge2_max_stderr,none": 0.11462518844305966,
						"rougeL_acc,none": 0.36107711138310894,
						"rougeL_acc_stderr,none": 0.016814312844836882,
						"rougeL_diff,none": -0.12362089855666174,
						"rougeL_diff_stderr,none": 0.10790551921795612,
						"rougeL_max,none": 0.961642544688371,
						"rougeL_max_stderr,none": 0.13846776561570626
					},
					"xcopa": {
						"acc,none": 0.6247272727272727,
						"acc_stderr,none": 0.07076815037132106,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.04961396343609649,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6301666566391914,
						"acc_stderr,none": 0.06025876178197097,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03757560804777742,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6257046223224352,
						"acc_norm,none": 0.6276775648252536,
						"acc_norm_stderr,none": 0.09523456861312146,
						"acc_stderr,none": 0.10905805098245354,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.016479670869547956,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.015325105508898137,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316405,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3541666666666667,
						"acc_stderr,none": 0.013811933499570958,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39505119453924914,
						"acc_norm,none": 0.42662116040955633,
						"acc_norm_stderr,none": 0.014453185592920293,
						"acc_stderr,none": 0.01428589829293817,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7394781144781145,
						"acc_norm,none": 0.7268518518518519,
						"acc_norm_stderr,none": 0.009143032718360346,
						"acc_stderr,none": 0.00900643589033659,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8382686567164179,
						"acc_stderr,none": 0.1533746648235865,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523725,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565643,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000072,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559934,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525071,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.01288666233227454,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122583,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.01316983084342566,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705577983,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584937,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426114,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165564,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796391,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474905,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910613,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425686,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247116,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673723,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697596,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910616,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.015620595475301315,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381795,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617328,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.679,
						"acc_stderr,none": 0.01477082181793465,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559575,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487923,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333354,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406725,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162906,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302706,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620663,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.00838416926679638,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271305,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852431,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340994,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.01246159264665998,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118778,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578247,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333345,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659986,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.015766025737882168,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452373,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099831,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.01042749887234397,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611486,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405747,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796584,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340968,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412796,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378227,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280311,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315124,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319421,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099224,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.015499685165842596,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.342,
						"acc_stderr,none": 0.015008706182121733,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.30771887411500615,
						"acc_norm,none": 0.30771887411500615,
						"acc_norm_stderr,none": 0.0598012064733283,
						"acc_stderr,none": 0.0598012064733283,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.33727810650887574,
						"acc_norm,none": 0.33727810650887574,
						"acc_norm_stderr,none": 0.03647582250277504,
						"acc_stderr,none": 0.03647582250277504,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3310810810810811,
						"acc_norm,none": 0.3310810810810811,
						"acc_norm_stderr,none": 0.03881461247660827,
						"acc_stderr,none": 0.03881461247660827,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395385,
						"acc_stderr,none": 0.03242041613395385,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.035679697722680474,
						"acc_stderr,none": 0.035679697722680474,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.37320574162679426,
						"acc_norm,none": 0.37320574162679426,
						"acc_norm_stderr,none": 0.033535516193483675,
						"acc_stderr,none": 0.033535516193483675,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.0418644516301375,
						"acc_stderr,none": 0.0418644516301375,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.33088235294117646,
						"acc_norm,none": 0.33088235294117646,
						"acc_norm_stderr,none": 0.040496842259456614,
						"acc_stderr,none": 0.040496842259456614,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.043172737765666686,
						"acc_stderr,none": 0.043172737765666686,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.33436532507739936,
						"acc_norm,none": 0.33436532507739936,
						"acc_norm_stderr,none": 0.026290609195557965,
						"acc_stderr,none": 0.026290609195557965,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115072,
						"acc_stderr,none": 0.03198001660115072,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.329608938547486,
						"acc_norm,none": 0.329608938547486,
						"acc_norm_stderr,none": 0.035233322309922176,
						"acc_stderr,none": 0.035233322309922176,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.270042194092827,
						"acc_norm,none": 0.270042194092827,
						"acc_norm_stderr,none": 0.028900721906293426,
						"acc_stderr,none": 0.028900721906293426,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.16037735849056603,
						"acc_norm,none": 0.16037735849056603,
						"acc_norm_stderr,none": 0.03581120619691076,
						"acc_stderr,none": 0.03581120619691076,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.41509433962264153,
						"acc_norm,none": 0.41509433962264153,
						"acc_norm_stderr,none": 0.04808633394970665,
						"acc_stderr,none": 0.04808633394970665,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252627,
						"acc_stderr,none": 0.04077494709252627,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936713,
						"acc_stderr,none": 0.04022559246936713,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.02706550456438953,
						"acc_stderr,none": 0.02706550456438953,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.036155076303109344,
						"acc_stderr,none": 0.036155076303109344,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067566,
						"acc_stderr,none": 0.035589261576067566,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.03934735112547112,
						"acc_stderr,none": 0.03934735112547112,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.03512091263428369,
						"acc_stderr,none": 0.03512091263428369,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683864,
						"acc_stderr,none": 0.028145741115683864,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03225883512300992,
						"acc_stderr,none": 0.03225883512300992,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40756302521008403,
						"acc_norm,none": 0.40756302521008403,
						"acc_norm_stderr,none": 0.03191863374478465,
						"acc_stderr,none": 0.03191863374478465,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.02901713355938127,
						"acc_stderr,none": 0.02901713355938127,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.04024778401977111,
						"acc_stderr,none": 0.04024778401977111,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3146853146853147,
						"acc_norm,none": 0.3146853146853147,
						"acc_norm_stderr,none": 0.03897077881510411,
						"acc_stderr,none": 0.03897077881510411,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.037036671945524846,
						"acc_stderr,none": 0.037036671945524846,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233627,
						"acc_stderr,none": 0.04222776832233627,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.038950910157241364,
						"acc_stderr,none": 0.038950910157241364,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3253968253968254,
						"acc_norm,none": 0.3253968253968254,
						"acc_norm_stderr,none": 0.041905964388711366,
						"acc_stderr,none": 0.041905964388711366,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3027027027027027,
						"acc_norm,none": 0.3027027027027027,
						"acc_norm_stderr,none": 0.03386945658791665,
						"acc_stderr,none": 0.03386945658791665,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.36627906976744184,
						"acc_norm,none": 0.36627906976744184,
						"acc_norm_stderr,none": 0.036843172681015855,
						"acc_stderr,none": 0.036843172681015855,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2944038929440389,
						"acc_norm,none": 0.2944038929440389,
						"acc_norm_stderr,none": 0.022509089804193687,
						"acc_stderr,none": 0.022509089804193687,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4205607476635514,
						"acc_norm,none": 0.4205607476635514,
						"acc_norm_stderr,none": 0.03382427699890585,
						"acc_stderr,none": 0.03382427699890585,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.03930879526823993,
						"acc_stderr,none": 0.03930879526823993,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3476190476190476,
						"acc_norm,none": 0.3476190476190476,
						"acc_norm_stderr,none": 0.03294043089165083,
						"acc_stderr,none": 0.03294043089165083,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03565038177453911,
						"acc_stderr,none": 0.03565038177453911,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.0346439012574329,
						"acc_stderr,none": 0.0346439012574329,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.039406691683377,
						"acc_stderr,none": 0.039406691683377,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.03878352372138621,
						"acc_stderr,none": 0.03878352372138621,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.04761904761904763,
						"acc_stderr,none": 0.04761904761904763,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.03464507889884372,
						"acc_stderr,none": 0.03464507889884372,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780316,
						"acc_stderr,none": 0.030808291124780316,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.02243941258278639,
						"acc_stderr,none": 0.02243941258278639,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.39655172413793105,
						"acc_norm,none": 0.39655172413793105,
						"acc_norm_stderr,none": 0.03218577394203013,
						"acc_stderr,none": 0.03218577394203013,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.33185840707964603,
						"acc_norm,none": 0.33185840707964603,
						"acc_norm_stderr,none": 0.031392030462821255,
						"acc_stderr,none": 0.031392030462821255,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.33136094674556216,
						"acc_norm,none": 0.33136094674556216,
						"acc_norm_stderr,none": 0.03631548844087169,
						"acc_stderr,none": 0.03631548844087169,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.03696826370174651,
						"acc_stderr,none": 0.03696826370174651,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070896,
						"acc_stderr,none": 0.03541088558070896,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.032659863237109066,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5637998241896163,
						"acc_stderr,none": 0.11753376065429981,
						"alias": "glue",
						"f1,none": 0.6865177433141125,
						"f1_stderr,none": 0.0001855121553969969,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5204142601075483,
						"acc_norm,none": 0.7000597490539733,
						"acc_norm_stderr,none": 0.004572949924250638,
						"acc_stderr,none": 0.004985620773683431,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.712691635940229,
						"acc_stderr,none": 0.01652671311448163,
						"alias": "lambada",
						"perplexity,none": 3.839968568764962,
						"perplexity_stderr,none": 0.2300788160753412
					},
					"lambada_multilingual": {
						"acc,none": 0.5387153114690473,
						"acc_stderr,none": 0.08422127386869964,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.930366545792992,
						"perplexity_stderr,none": 8.674263111850275
					},
					"lambada_openai": {
						"acc,none": 0.7430622938094315,
						"acc_stderr,none": 0.006087494839873366,
						"alias": " - lambada_openai",
						"perplexity,none": 3.40874022684659,
						"perplexity_stderr,none": 0.07105506637451445
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4261595187269552,
						"acc_stderr,none": 0.0068895960716536375,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 35.677252512976565,
						"perplexity_stderr,none": 1.9998840410350258
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7436444789443043,
						"acc_stderr,none": 0.0060829758093544775,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.4108266438009314,
						"perplexity_stderr,none": 0.07117586466006506
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45992625654958275,
						"acc_stderr,none": 0.006943568216279225,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 31.480541175884508,
						"perplexity_stderr,none": 1.575832057795532
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.858662714268622,
						"perplexity_stderr,none": 0.8264654275405828
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5162041529206287,
						"acc_stderr,none": 0.006962318518940972,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 22.224549682034347,
						"perplexity_stderr,none": 1.1822386008427603
					},
					"lambada_standard": {
						"acc,none": 0.6821269163594023,
						"acc_stderr,none": 0.0064874129551929765,
						"alias": " - lambada_standard",
						"perplexity,none": 4.269753037985515,
						"perplexity_stderr,none": 0.09192057728015186
					},
					"logiqa": {
						"acc,none": 0.24731182795698925,
						"acc_norm,none": 0.2642089093701997,
						"acc_norm_stderr,none": 0.017293954549744518,
						"acc_stderr,none": 0.016922842446712383,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.32174903859849024,
						"acc_stderr,none": 0.06141532981276024,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.16,
						"acc_stderr,none": 0.03684529491774709,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.362962962962963,
						"acc_stderr,none": 0.04153948404742398,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03523807393012047,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4075471698113208,
						"acc_stderr,none": 0.030242233800854494,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.039420826399272135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.0379328118530781,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.03097669299853443,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518754,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.03878352372138622,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21957671957671956,
						"acc_stderr,none": 0.021320018599770348,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.04104947269903394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3903225806451613,
						"acc_stderr,none": 0.027751256636969573,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.28078817733990147,
						"acc_stderr,none": 0.03161856335358611,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.037937131711656344,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.42424242424242425,
						"acc_stderr,none": 0.035212249088415824,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.42487046632124353,
						"acc_stderr,none": 0.035674713352125395,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.30256410256410254,
						"acc_stderr,none": 0.02329088805377273,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.026466117538959916,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.25630252100840334,
						"acc_stderr,none": 0.028359620870533946,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.03631329803969653,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3743119266055046,
						"acc_stderr,none": 0.020748959408988313,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1527777777777778,
						"acc_stderr,none": 0.024536326026134234,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.03476099060501637,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.37130801687763715,
						"acc_stderr,none": 0.031450686007448596,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34080717488789236,
						"acc_stderr,none": 0.03181149747055358,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3816793893129771,
						"acc_stderr,none": 0.042607351576445594,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.31073326248671623,
						"acc_stderr,none": 0.05300174579809486,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.040655781409087044,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.046166311118017125,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3128834355828221,
						"acc_stderr,none": 0.036429145782924055,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340455,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458934,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.39316239316239315,
						"acc_stderr,none": 0.03199957924651047,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.41507024265644954,
						"acc_stderr,none": 0.01762013700365527,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.025305258131879737,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.31699346405228757,
						"acc_stderr,none": 0.02664327847450875,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3678789829417444,
						"acc_stderr,none": 0.04596500725174761,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3987138263665595,
						"acc_stderr,none": 0.0278093225857745,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.35802469135802467,
						"acc_stderr,none": 0.0266756119260371,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2796610169491525,
						"acc_stderr,none": 0.01146339739386197,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3602941176470588,
						"acc_stderr,none": 0.029163128570670733,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3022875816993464,
						"acc_stderr,none": 0.01857923271111388,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.44545454545454544,
						"acc_stderr,none": 0.047605488214603246,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2571428571428571,
						"acc_stderr,none": 0.02797982353874455,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.338641533961651,
						"acc_stderr,none": 0.05953393064172072,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.43283582089552236,
						"acc_stderr,none": 0.03503490923673281,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.276244846178243,
						"acc_stderr,none": 0.06730341822253266,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3253012048192771,
						"acc_stderr,none": 0.03647168523683228,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.391812865497076,
						"acc_stderr,none": 0.037439798259264,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3548650025471218,
						"acc_stderr,none": 0.0048298524069489875,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34794548413344184,
						"acc_stderr,none": 0.00480395253343557,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150196,
						"alias": " - mrpc",
						"f1,none": 0.8320493066255779,
						"f1_stderr,none": 0.015914925812773578
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.004986149584487534,
						"exact_match_stderr,remove_whitespace": 0.0011724754922561936
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.02206494331392887,
						"acc_stderr,none": 0.020629569998345428,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654285,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3795,
						"acc_stderr,none": 0.010853514379554384,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.386,
						"acc_stderr,none": 0.010888584877766432,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.011164310140373718,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5055,
						"acc_stderr,none": 0.011182459420867631,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5095,
						"acc_stderr,none": 0.011181117282805221,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4677142857142857,
						"acc_stderr,none": 0.055049266309390346,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7709466811751904,
						"acc_norm,none": 0.7736670293797606,
						"acc_norm_stderr,none": 0.009763294246879424,
						"acc_stderr,none": 0.009804509865175504,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7416893845983361,
						"acc_norm,none": 0.631319903018174,
						"acc_norm_stderr,none": 0.011021069513552846,
						"acc_stderr,none": 0.14621649859008823,
						"alias": "pythia",
						"bits_per_byte,none": 0.659210083718825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5792177206647482,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.40874022684659,
						"perplexity_stderr,none": 0.07105506637451445,
						"word_perplexity,none": 11.512213784558806,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49697968149368477,
						"acc_stderr,none": 0.0067652871181183415,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6710610932475884,
						"acc_stderr,none": 0.0023366418603591992,
						"alias": " - qqp",
						"f1,none": 0.6852829117069361,
						"f1_stderr,none": 0.0025968560110239893
					},
					"record": {
						"alias": "record",
						"em,none": 0.2633,
						"em_stderr,none": 0.004404458073939524,
						"f1,none": 0.2729719049990177,
						"f1_stderr,none": 0.004416870564697776
					},
					"rte": {
						"acc,none": 0.6101083032490975,
						"acc_stderr,none": 0.029357625083848052,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.946,
						"acc_norm,none": 0.922,
						"acc_norm_stderr,none": 0.008484573530118585,
						"acc_stderr,none": 0.007150883521295432,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7889908256880734,
						"acc_stderr,none": 0.01382539563581968,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3420106425967564,
						"acc_stderr,none": 0.0017275455606294446,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446325,
						"bleu_diff,none": -0.04418830843983365,
						"bleu_diff_stderr,none": 0.07498898884520837,
						"bleu_max,none": 0.2407329989634263,
						"bleu_max_stderr,none": 0.05237534035086209,
						"rouge1_acc,none": 0.3671970624235006,
						"rouge1_acc_stderr,none": 0.01687480500145318,
						"rouge1_diff,none": -0.11439673791145699,
						"rouge1_diff_stderr,none": 0.10590911790028439,
						"rouge1_max,none": 1.026800278706878,
						"rouge1_max_stderr,none": 0.1457719553783151,
						"rouge2_acc,none": 0.3157894736842105,
						"rouge2_acc_stderr,none": 0.016272287957916885,
						"rouge2_diff,none": -0.11334875594630169,
						"rouge2_diff_stderr,none": 0.1163014374583124,
						"rouge2_max,none": 0.6262977431506198,
						"rouge2_max_stderr,none": 0.11462518844305966,
						"rougeL_acc,none": 0.36107711138310894,
						"rougeL_acc_stderr,none": 0.016814312844836882,
						"rougeL_diff,none": -0.12362089855666174,
						"rougeL_diff_stderr,none": 0.10790551921795612,
						"rougeL_max,none": 0.961642544688371,
						"rougeL_max_stderr,none": 0.13846776561570626
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3561811505507956,
						"bleu_acc_stderr,none": 0.016763790728446325,
						"bleu_diff,none": -0.04418830843983365,
						"bleu_diff_stderr,none": 0.07498898884520837,
						"bleu_max,none": 0.2407329989634263,
						"bleu_max_stderr,none": 0.05237534035086209,
						"rouge1_acc,none": 0.3671970624235006,
						"rouge1_acc_stderr,none": 0.01687480500145318,
						"rouge1_diff,none": -0.11439673791145699,
						"rouge1_diff_stderr,none": 0.10590911790028439,
						"rouge1_max,none": 1.026800278706878,
						"rouge1_max_stderr,none": 0.1457719553783151,
						"rouge2_acc,none": 0.3157894736842105,
						"rouge2_acc_stderr,none": 0.016272287957916885,
						"rouge2_diff,none": -0.11334875594630169,
						"rouge2_diff_stderr,none": 0.1163014374583124,
						"rouge2_max,none": 0.6262977431506198,
						"rouge2_max_stderr,none": 0.11462518844305966,
						"rougeL_acc,none": 0.36107711138310894,
						"rougeL_acc_stderr,none": 0.016814312844836882,
						"rougeL_diff,none": -0.12362089855666174,
						"rougeL_diff_stderr,none": 0.10790551921795612,
						"rougeL_max,none": 0.961642544688371,
						"rougeL_max_stderr,none": 0.13846776561570626
					},
					"truthfulqa_mc1": {
						"acc,none": 0.26438188494492043,
						"acc_stderr,none": 0.015438211119522502,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.41963940024859236,
						"acc_stderr,none": 0.014214967545774367,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.659210083718825,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5792177206647482,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.512213784558806,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6748224151539068,
						"acc_stderr,none": 0.013165525471764349,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.0492300107297805,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6247272727272727,
						"acc_stderr,none": 0.07076815037132106,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.021948929609938612,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914163,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566072,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.01948659680164338,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928862,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.02161328916516578,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420918,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4351807228915663,
						"acc_stderr,none": 0.04961396343609649,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.009995852282822383,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4847389558232932,
						"acc_stderr,none": 0.01001740350857898,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38072289156626504,
						"acc_stderr,none": 0.009732727412507504,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.00999977679318764,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955259,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4959839357429719,
						"acc_stderr,none": 0.0100217495745559,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43333333333333335,
						"acc_stderr,none": 0.009932588282324245,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4795180722891566,
						"acc_stderr,none": 0.010013660629930818,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673286,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.00985258191903224,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.00997304277481168,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750344,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.009895812914052204,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3473895582329317,
						"acc_stderr,none": 0.009543835409334902,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6301666566391914,
						"acc_stderr,none": 0.06025876178197097,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040305,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7776307081403044,
						"acc_stderr,none": 0.010701277694882511,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7088021178027796,
						"acc_stderr,none": 0.011691443511878192,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5612177365982793,
						"acc_stderr,none": 0.012770319186938004,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133262,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6591661151555261,
						"acc_stderr,none": 0.012197767350433122,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5453342157511581,
						"acc_stderr,none": 0.012814127367359405,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.686300463269358,
						"acc_stderr,none": 0.011940585179444298,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5579086697551291,
						"acc_stderr,none": 0.012780536370279764,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751379,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6432825943084051,
						"acc_stderr,none": 0.012327487677110359,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03757560804777742,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8701075268817204,
						"acc_stderr,none": 0.006973653965627702,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7518248175182481,
						"acc_stderr,none": 0.013955800392484955,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7680608365019012,
						"acc_stderr,none": 0.026075593860304686,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.02678685165920093,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7738095238095238,
						"acc_stderr,none": 0.018653923879063377,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk2-blink_pth"
	},
	"./rwkv-x-dev/r3-testchunk2_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6262683201803834,
						"acc_norm,none": 0.6257046223224352,
						"acc_norm_stderr,none": 0.09113134882344597,
						"acc_stderr,none": 0.10732742108437791,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.0176485793476215,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8384328358208956,
						"acc_stderr,none": 0.14202091546074838,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.30357451217406306,
						"acc_norm,none": 0.30357451217406306,
						"acc_norm_stderr,none": 0.05580815942501653,
						"acc_stderr,none": 0.05580815942501653,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5764203191996188,
						"acc_stderr,none": 0.014022989650814058,
						"alias": "glue",
						"f1,none": 0.6883908762016194,
						"f1_stderr,none": 0.00020394844180764653,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"lambada": {
						"acc,none": 0.7135649136425384,
						"acc_stderr,none": 0.016121843790909866,
						"alias": "lambada",
						"perplexity,none": 3.803736745341393,
						"perplexity_stderr,none": 0.22844847367452314
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"mmlu": {
						"acc,none": 0.3321464178891896,
						"acc_stderr,none": 0.06091173753049962,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3539161520961976,
						"acc_stderr,none": 0.048395331555541835,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28322232794164287,
						"acc_stderr,none": 0.05862955260080505,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.45671428571428574,
						"acc_stderr,none": 0.05326292303210276,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7432960904796232,
						"acc_norm,none": 0.6296361239391683,
						"acc_norm_stderr,none": 0.010264891483671414,
						"acc_stderr,none": 0.13790091970498453,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325378320103602,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502896971517328,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.375119526077462,
						"perplexity_stderr,none": 0.06615221087924655,
						"word_perplexity,none": 10.428536711782357,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3302969599916217,
						"acc_stderr,none": 0.001652651966553619,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.0711251373879857,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43991967871485943,
						"acc_stderr,none": 0.04533624542720319,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6330545695204861,
						"acc_stderr,none": 0.05302993620393225,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.035442329439520394,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6262683201803834,
						"acc_norm,none": 0.6257046223224352,
						"acc_norm_stderr,none": 0.09113134882344597,
						"acc_stderr,none": 0.10732742108437791,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.0176485793476215,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797573,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055235,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3541666666666667,
						"acc_stderr,none": 0.01381193349957096,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3993174061433447,
						"acc_norm,none": 0.4334470989761092,
						"acc_norm_stderr,none": 0.014481376224558896,
						"acc_stderr,none": 0.014312094557946709,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7382154882154882,
						"acc_norm,none": 0.7205387205387206,
						"acc_norm_stderr,none": 0.009207838142597237,
						"acc_stderr,none": 0.009020523527210176,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8384328358208956,
						"acc_stderr,none": 0.14202091546074838,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.00882342636694231,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503008,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000115,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644586,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265153,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877366,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042965,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.00358383088940363,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406136,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118759,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165586,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919284,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178329,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.0127493743590244,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.013334797216936431,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.01246159264665999,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.00838416926679639,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.01182860583145426,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029898,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.432,
						"acc_stderr,none": 0.015672320237336206,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655139,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509007,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474924,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333445,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792929,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.015275252316519357,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.01579621855130261,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.01545972195749338,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509005,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832026,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992438,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621219,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968132,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406093,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000109,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704157,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099184,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.0058145342727349125,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030067,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717595,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341676,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633706,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.0152580735615218,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240803,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696846,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662134,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475284,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286444,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796393,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.00442940398017834,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.0053971408290991955,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498329,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.01513949154378053,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.30357451217406306,
						"acc_norm,none": 0.30357451217406306,
						"acc_norm_stderr,none": 0.05580815942501653,
						"acc_stderr,none": 0.05580815942501653,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620372,
						"acc_stderr,none": 0.03794062549620372,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245472,
						"acc_stderr,none": 0.03244189290245472,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.366412213740458,
						"acc_norm,none": 0.366412213740458,
						"acc_norm_stderr,none": 0.042258754519696386,
						"acc_stderr,none": 0.042258754519696386,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.34365325077399383,
						"acc_norm,none": 0.34365325077399383,
						"acc_norm_stderr,none": 0.02646664923557932,
						"acc_stderr,none": 0.02646664923557932,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.329608938547486,
						"acc_norm,none": 0.329608938547486,
						"acc_norm_stderr,none": 0.03523332230992218,
						"acc_stderr,none": 0.03523332230992218,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.02931281415395592,
						"acc_stderr,none": 0.02931281415395592,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.042324735320550415,
						"acc_stderr,none": 0.042324735320550415,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.026223115500506114,
						"acc_stderr,none": 0.026223115500506114,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.30994152046783624,
						"acc_norm,none": 0.30994152046783624,
						"acc_norm_stderr,none": 0.035469769593931624,
						"acc_stderr,none": 0.035469769593931624,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.03908914479291562,
						"acc_stderr,none": 0.03908914479291562,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.34591194968553457,
						"acc_norm,none": 0.34591194968553457,
						"acc_norm_stderr,none": 0.037841848841408295,
						"acc_stderr,none": 0.037841848841408295,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40336134453781514,
						"acc_norm,none": 0.40336134453781514,
						"acc_norm_stderr,none": 0.031866081214088314,
						"acc_stderr,none": 0.031866081214088314,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03448901746724545,
						"acc_stderr,none": 0.03448901746724545,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.33557046979865773,
						"acc_norm,none": 0.33557046979865773,
						"acc_norm_stderr,none": 0.03881373830315734,
						"acc_stderr,none": 0.03881373830315734,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261344,
						"acc_stderr,none": 0.04319782230261344,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.040693063197213754,
						"acc_stderr,none": 0.040693063197213754,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336698,
						"acc_stderr,none": 0.03333068663336698,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.36627906976744184,
						"acc_norm,none": 0.36627906976744184,
						"acc_norm_stderr,none": 0.036843172681015855,
						"acc_stderr,none": 0.036843172681015855,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2798053527980535,
						"acc_norm,none": 0.2798053527980535,
						"acc_norm_stderr,none": 0.02216976172592782,
						"acc_stderr,none": 0.02216976172592782,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3878504672897196,
						"acc_norm,none": 0.3878504672897196,
						"acc_norm_stderr,none": 0.03338651735918192,
						"acc_stderr,none": 0.03338651735918192,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837383,
						"acc_stderr,none": 0.04239540943837383,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.32857142857142857,
						"acc_norm,none": 0.32857142857142857,
						"acc_norm_stderr,none": 0.03248939796876841,
						"acc_stderr,none": 0.03248939796876841,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732731,
						"acc_stderr,none": 0.03460236918732731,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.038061426873099935,
						"acc_stderr,none": 0.038061426873099935,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.37142857142857144,
						"acc_norm,none": 0.37142857142857144,
						"acc_norm_stderr,none": 0.04738035414793429,
						"acc_stderr,none": 0.04738035414793429,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293145,
						"acc_stderr,none": 0.030641194076293145,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.031924831026639656,
						"acc_stderr,none": 0.031924831026639656,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456225,
						"acc_stderr,none": 0.03579526516456225,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.03696826370174651,
						"acc_stderr,none": 0.03696826370174651,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0348735088019777,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5764203191996188,
						"acc_stderr,none": 0.014022989650814058,
						"alias": "glue",
						"f1,none": 0.6883908762016194,
						"f1_stderr,none": 0.00020394844180764653,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"hellaswag": {
						"acc,none": 0.5264887472615017,
						"acc_norm,none": 0.7086237801234814,
						"acc_norm_stderr,none": 0.004534677750102733,
						"acc_stderr,none": 0.004982774293927773,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7135649136425384,
						"acc_stderr,none": 0.016121843790909866,
						"alias": "lambada",
						"perplexity,none": 3.803736745341393,
						"perplexity_stderr,none": 0.22844847367452314
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"lambada_openai": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284601,
						"alias": " - lambada_openai",
						"perplexity,none": 3.375119526077462,
						"perplexity_stderr,none": 0.06615221087924655
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42751795070832527,
						"acc_stderr,none": 0.0068923954478686475,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40995706565969,
						"perplexity_stderr,none": 1.9198882405259308
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284605,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.376276908213185,
						"perplexity_stderr,none": 0.06624295795502655
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4486706772753736,
						"acc_stderr,none": 0.006929173919665489,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.08178023365845,
						"perplexity_stderr,none": 1.438828440779044
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.477136806072853,
						"perplexity_stderr,none": 0.8029953639024064
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5196972637298661,
						"acc_stderr,none": 0.006960570207731863,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.65514219581279,
						"perplexity_stderr,none": 1.1521232467165174
					},
					"lambada_standard": {
						"acc,none": 0.6838734717640209,
						"acc_stderr,none": 0.006477843064847518,
						"alias": " - lambada_standard",
						"perplexity,none": 4.232316351133247,
						"perplexity_stderr,none": 0.09029889292040708
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.28110599078341014,
						"acc_norm_stderr,none": 0.017632374626460008,
						"acc_stderr,none": 0.016887410894296937,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3321464178891896,
						"acc_stderr,none": 0.06091173753049962,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.04094376269996794,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.36981132075471695,
						"acc_stderr,none": 0.029711421880107922,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.03295304696818318,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3702127659574468,
						"acc_stderr,none": 0.03156564682236784,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481425,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309994,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643895,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.04134913018303316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38387096774193546,
						"acc_stderr,none": 0.027666182075539635,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2955665024630542,
						"acc_stderr,none": 0.032104944337514575,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4727272727272727,
						"acc_stderr,none": 0.03898531605579419,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.37373737373737376,
						"acc_stderr,none": 0.034468977386593325,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.46113989637305697,
						"acc_stderr,none": 0.03597524411734578,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3153846153846154,
						"acc_stderr,none": 0.02355964698318995,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31092436974789917,
						"acc_stderr,none": 0.030066761582977924,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.034791855725996586,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3743119266055046,
						"acc_stderr,none": 0.020748959408988316,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258526,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46568627450980393,
						"acc_stderr,none": 0.03501038327635897,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4388185654008439,
						"acc_stderr,none": 0.032302649315470375,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.34710743801652894,
						"acc_stderr,none": 0.043457245702925355,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497751,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.03731133519673892,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458934,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.405982905982906,
						"acc_stderr,none": 0.03217180182641086,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252606,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.44316730523627074,
						"acc_stderr,none": 0.017764085035348404,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.024946792225272307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.02718449890994161,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3890675241157556,
						"acc_stderr,none": 0.027690337536485376,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.026822801759507898,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2966101694915254,
						"acc_stderr,none": 0.011665946586082852,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.029097209568411952,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.33169934640522875,
						"acc_stderr,none": 0.019047485239360378,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3183673469387755,
						"acc_stderr,none": 0.029822533793982062,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3539161520961976,
						"acc_stderr,none": 0.048395331555541835,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4228855721393035,
						"acc_stderr,none": 0.034932317774212816,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28322232794164287,
						"acc_stderr,none": 0.05862955260080505,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.03711725190740749,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.38596491228070173,
						"acc_stderr,none": 0.03733756969066164,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.37992868059093227,
						"acc_stderr,none": 0.004899466978317793,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36838893409275836,
						"acc_stderr,none": 0.00486496035089917,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7475490196078431,
						"acc_stderr,none": 0.02153332842706632,
						"alias": " - mrpc",
						"f1,none": 0.8398133748055988,
						"f1_stderr,none": 0.015562063007134155
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.007479224376731302,
						"exact_match_stderr,remove_whitespace": 0.001434183240785568
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020553269174209184,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.399,
						"acc_stderr,none": 0.010952601505572451,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.010883323176386975,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3725,
						"acc_stderr,none": 0.010813433320184794,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5395,
						"acc_stderr,none": 0.011148184426533283,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5205,
						"acc_stderr,none": 0.011173732641806813,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.011177408788874896,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4965,
						"acc_stderr,none": 0.011182862030875934,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.45671428571428574,
						"acc_stderr,none": 0.05326292303210276,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7736670293797606,
						"acc_norm,none": 0.7736670293797606,
						"acc_norm_stderr,none": 0.009763294246879417,
						"acc_stderr,none": 0.009763294246879418,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7432960904796232,
						"acc_norm,none": 0.6296361239391683,
						"acc_norm_stderr,none": 0.010264891483671414,
						"acc_stderr,none": 0.13790091970498453,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325378320103602,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502896971517328,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.375119526077462,
						"perplexity_stderr,none": 0.06615221087924655,
						"word_perplexity,none": 10.428536711782357,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49679663188724144,
						"acc_stderr,none": 0.006765271702920654,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6767004699480583,
						"acc_stderr,none": 0.0023262386975602825,
						"alias": " - qqp",
						"f1,none": 0.6870795527997893,
						"f1_stderr,none": 0.0025998593604236917
					},
					"record": {
						"alias": "record",
						"em,none": 0.2523,
						"em_stderr,none": 0.004343542061010362,
						"f1,none": 0.26155857166051866,
						"f1_stderr,none": 0.004358518434111173
					},
					"rte": {
						"acc,none": 0.592057761732852,
						"acc_stderr,none": 0.029581952519606197,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.956,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323502,
						"acc_stderr,none": 0.006488921798427418,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9105504587155964,
						"acc_stderr,none": 0.009670122820901152,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3302969599916217,
						"acc_stderr,none": 0.001652651966553619,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156507,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4060039567029496,
						"acc_stderr,none": 0.014334452088177841,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6325378320103602,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502896971517328,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.428536711782357,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6764009471191792,
						"acc_stderr,none": 0.013148883320923156,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04926646390821466,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.0711251373879857,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.02001121929807353,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01992048320956607,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407053,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43991967871485943,
						"acc_stderr,none": 0.04533624542720319,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.00946863466929353,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46586345381526106,
						"acc_stderr,none": 0.009998688066102651,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409711,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39879518072289155,
						"acc_stderr,none": 0.009814625416137573,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5373493975903615,
						"acc_stderr,none": 0.009994072620561414,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.01002195648306809,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975081,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.009941039791133126,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542313,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906754,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505931,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4610441767068273,
						"acc_stderr,none": 0.009991608448389061,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099368,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044873,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.009663601903728022,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6330545695204861,
						"acc_stderr,none": 0.05302993620393225,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551178,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7796161482461945,
						"acc_stderr,none": 0.010666988429058744,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499848,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352975,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.01258077297613326,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.012168840221678039,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828174,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.01188497207331379,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332799,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.0126228952159077,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6479152878888154,
						"acc_stderr,none": 0.012291198261674581,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.035442329439520394,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8589247311827957,
						"acc_stderr,none": 0.007220793665802783,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7580813347236705,
						"acc_stderr,none": 0.013835977151777784,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.752851711026616,
						"acc_stderr,none": 0.02664912042079351,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6603174603174603,
						"acc_stderr,none": 0.026726874754294024,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7658730158730159,
						"acc_stderr,none": 0.0188807884850783,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/r3-testchunk2_pth"
	},
	"./rwkv-x-dev/rwkv-230_pth": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.629086809470124,
						"acc_norm_stderr,none": 0.09071750881524479,
						"acc_stderr,none": 0.10888339105688964,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.344375,
						"acc_stderr,none": 0.014623330784697323,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8173283582089552,
						"acc_stderr,none": 0.170921758380717,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2761181143153168,
						"acc_norm,none": 0.2761181143153168,
						"acc_norm_stderr,none": 0.04453418837532627,
						"acc_stderr,none": 0.04453418837532627,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5100643163411148,
						"acc_stderr,none": 0.009826836010521584,
						"alias": "glue",
						"f1,none": 0.6352164719516536,
						"f1_stderr,none": 0.00034237755983583,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.01658278986529497,
						"alias": "lambada",
						"perplexity,none": 3.8672807494322075,
						"perplexity_stderr,none": 0.24976738352764186
					},
					"lambada_multilingual": {
						"acc,none": 0.52963322336503,
						"acc_stderr,none": 0.08605652118687343,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.02112592135555,
						"perplexity_stderr,none": 8.743626761504078
					},
					"mmlu": {
						"acc,none": 0.3023785785500641,
						"acc_stderr,none": 0.04835170606668173,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.29032943676939427,
						"acc_stderr,none": 0.03805569615252467,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3334406179594464,
						"acc_stderr,none": 0.041516662231788515,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3132921676958076,
						"acc_stderr,none": 0.04924841620676432,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2790992705359975,
						"acc_stderr,none": 0.05689333902833598,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48078571428571426,
						"acc_stderr,none": 0.058930034539571734,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7237715759751767,
						"acc_norm,none": 0.6328976518349325,
						"acc_norm_stderr,none": 0.01019725311514425,
						"acc_stderr,none": 0.1586484521327461,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348088693273369,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527320278168593,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.393646412338403,
						"perplexity_stderr,none": 0.06715986465283244,
						"word_perplexity,none": 10.516692141293987,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.30592439921042985,
						"acc_stderr,none": 0.001462954312143612,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.880744587392929,
						"bleu_diff_stderr,none": 0.8678812004118877,
						"bleu_max,none": 26.41035806044772,
						"bleu_max_stderr,none": 0.8053031354882549,
						"rouge1_acc,none": 0.26805385556915545,
						"rouge1_acc_stderr,none": 0.015506204722834543,
						"rouge1_diff,none": -11.396445439036393,
						"rouge1_diff_stderr,none": 0.9230098266135631,
						"rouge1_max,none": 51.40550320621949,
						"rouge1_max_stderr,none": 0.8807432651825933,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041845,
						"rouge2_diff,none": -13.823272558593755,
						"rouge2_diff_stderr,none": 1.116589349674441,
						"rouge2_max,none": 35.0485157136745,
						"rouge2_max_stderr,none": 1.0318182579197597,
						"rougeL_acc,none": 0.27539779681762544,
						"rougeL_acc_stderr,none": 0.015638135667775523,
						"rougeL_diff,none": -11.714249444591902,
						"rougeL_diff_stderr,none": 0.9328779894414138,
						"rougeL_max,none": 48.53499046515222,
						"rougeL_max_stderr,none": 0.8998293381798167
					},
					"xcopa": {
						"acc,none": 0.621090909090909,
						"acc_stderr,none": 0.07255915696878039,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43544846050870145,
						"acc_stderr,none": 0.0486267655398956,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6209012694783707,
						"acc_stderr,none": 0.0626235906973835,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03671883129234059,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6347237880496054,
						"acc_norm,none": 0.629086809470124,
						"acc_norm_stderr,none": 0.09071750881524479,
						"acc_stderr,none": 0.10888339105688964,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.344375,
						"acc_stderr,none": 0.014623330784697323,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408945,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.341,
						"acc_stderr,none": 0.01499813134840271,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3475,
						"acc_stderr,none": 0.013751753243291852,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4044368600682594,
						"acc_norm,none": 0.4377133105802048,
						"acc_norm_stderr,none": 0.014497573881108288,
						"acc_stderr,none": 0.014342036483436177,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7483164983164983,
						"acc_norm,none": 0.7234848484848485,
						"acc_norm_stderr,none": 0.00917788010146828,
						"acc_stderr,none": 0.008905088235948763,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8173283582089552,
						"acc_stderr,none": 0.170921758380717,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151101,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329848,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346933,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731979,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525047,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661764,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.015784807891138782,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499344,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469293,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611474,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248792,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.00682976175614092,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118746,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024069,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528002,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575023,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920838,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704166,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343951,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611481,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.333,
						"acc_stderr,none": 0.01491084616422986,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621243,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074131,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066536,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244054,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919302,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525066,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477343,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904602,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797575,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.01554324910025554,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750354,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639236,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.014876872027456738,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103308,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787745,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.013736254390651155,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061831998,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578234,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651538,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.01053479862085575,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584937,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343972,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611455,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031313,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.01576602573788216,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936711,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697071,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.0156103389675778,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849876,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.0102068692643818,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.01270265158765514,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475291,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315162,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240672,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656802,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452371,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361427,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2761181143153168,
						"acc_norm,none": 0.2761181143153168,
						"acc_norm_stderr,none": 0.04453418837532627,
						"acc_stderr,none": 0.04453418837532627,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.31756756756756754,
						"acc_norm,none": 0.31756756756756754,
						"acc_norm_stderr,none": 0.03839628734149681,
						"acc_stderr,none": 0.03839628734149681,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.03524390844511784,
						"acc_stderr,none": 0.03524390844511784,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.291866028708134,
						"acc_norm,none": 0.291866028708134,
						"acc_norm_stderr,none": 0.03152229446041968,
						"acc_stderr,none": 0.03152229446041968,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070895,
						"acc_stderr,none": 0.03541088558070895,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847836,
						"acc_stderr,none": 0.03915345408847836,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03921568627450977,
						"acc_stderr,none": 0.03921568627450977,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566668,
						"acc_stderr,none": 0.04317273776566668,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.26625386996904027,
						"acc_norm,none": 0.26625386996904027,
						"acc_norm_stderr,none": 0.024631617986737357,
						"acc_stderr,none": 0.024631617986737357,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.033831950813285244,
						"acc_stderr,none": 0.033831950813285244,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2616033755274262,
						"acc_norm,none": 0.2616033755274262,
						"acc_norm_stderr,none": 0.028609516716994934,
						"acc_stderr,none": 0.028609516716994934,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.37383177570093457,
						"acc_norm,none": 0.37383177570093457,
						"acc_norm_stderr,none": 0.046992731189948504,
						"acc_stderr,none": 0.046992731189948504,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.36792452830188677,
						"acc_norm,none": 0.36792452830188677,
						"acc_norm_stderr,none": 0.047061871107614554,
						"acc_stderr,none": 0.047061871107614554,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055041,
						"acc_stderr,none": 0.04232473532055041,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.02706550456438952,
						"acc_stderr,none": 0.02706550456438952,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.03114557065948678,
						"acc_stderr,none": 0.03114557065948678,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2805755395683453,
						"acc_norm,none": 0.2805755395683453,
						"acc_norm_stderr,none": 0.03824529014900686,
						"acc_stderr,none": 0.03824529014900686,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2883435582822086,
						"acc_norm,none": 0.2883435582822086,
						"acc_norm_stderr,none": 0.035590395316173425,
						"acc_stderr,none": 0.035590395316173425,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.028858905984721215,
						"acc_stderr,none": 0.028858905984721215,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.03031371053819889,
						"acc_stderr,none": 0.03031371053819889,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3487394957983193,
						"acc_norm,none": 0.3487394957983193,
						"acc_norm_stderr,none": 0.030956636328566545,
						"acc_stderr,none": 0.030956636328566545,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930118,
						"acc_stderr,none": 0.028009647070930118,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.036333844140734636,
						"acc_stderr,none": 0.036333844140734636,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.038221270785361555,
						"acc_stderr,none": 0.038221270785361555,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.034293230802398746,
						"acc_stderr,none": 0.034293230802398746,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28187919463087246,
						"acc_norm,none": 0.28187919463087246,
						"acc_norm_stderr,none": 0.036982767559851006,
						"acc_stderr,none": 0.036982767559851006,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03744254928577061,
						"acc_stderr,none": 0.03744254928577061,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2627118644067797,
						"acc_norm,none": 0.2627118644067797,
						"acc_norm_stderr,none": 0.04068792432070351,
						"acc_stderr,none": 0.04068792432070351,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.04013964554072775,
						"acc_stderr,none": 0.04013964554072775,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.03766763889539852,
						"acc_stderr,none": 0.03766763889539852,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03932537680392871,
						"acc_stderr,none": 0.03932537680392871,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26763990267639903,
						"acc_norm,none": 0.26763990267639903,
						"acc_norm_stderr,none": 0.021864816663672664,
						"acc_stderr,none": 0.021864816663672664,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.031082365543808232,
						"acc_stderr,none": 0.031082365543808232,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.34959349593495936,
						"acc_norm,none": 0.34959349593495936,
						"acc_norm_stderr,none": 0.04317120734620423,
						"acc_stderr,none": 0.04317120734620423,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837383,
						"acc_stderr,none": 0.04239540943837383,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.030927395843275772,
						"acc_stderr,none": 0.030927395843275772,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03425177889602087,
						"acc_stderr,none": 0.03425177889602087,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03330267393083602,
						"acc_stderr,none": 0.03330267393083602,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707841,
						"acc_stderr,none": 0.03632984052707841,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.033370375852212746,
						"acc_stderr,none": 0.033370375852212746,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.030200390075231467,
						"acc_stderr,none": 0.030200390075231467,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3218390804597701,
						"acc_norm,none": 0.3218390804597701,
						"acc_norm_stderr,none": 0.03551916251914105,
						"acc_stderr,none": 0.03551916251914105,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.02911849599823729,
						"acc_stderr,none": 0.02911849599823729,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624337,
						"acc_stderr,none": 0.03546563019624337,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002354,
						"acc_stderr,none": 0.03273943999002354,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.03416149068322981,
						"acc_stderr,none": 0.03416149068322981,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"copa": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.03015113445777634,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5100643163411148,
						"acc_stderr,none": 0.009826836010521584,
						"alias": "glue",
						"f1,none": 0.6352164719516536,
						"f1_stderr,none": 0.00034237755983583,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013133767897305228
					},
					"hellaswag": {
						"acc,none": 0.5236008763194583,
						"acc_norm,none": 0.7060346544513045,
						"acc_norm_stderr,none": 0.004546451825028364,
						"acc_stderr,none": 0.004984219681732655,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.708131185717058,
						"acc_stderr,none": 0.01658278986529497,
						"alias": "lambada",
						"perplexity,none": 3.8672807494322075,
						"perplexity_stderr,none": 0.24976738352764186
					},
					"lambada_multilingual": {
						"acc,none": 0.52963322336503,
						"acc_stderr,none": 0.08605652118687343,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.02112592135555,
						"perplexity_stderr,none": 8.743626761504078
					},
					"lambada_openai": {
						"acc,none": 0.7393751212885697,
						"acc_stderr,none": 0.006115788029333533,
						"alias": " - lambada_openai",
						"perplexity,none": 3.393646412338403,
						"perplexity_stderr,none": 0.06715986465283244
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4137395691830002,
						"acc_stderr,none": 0.0068615288414871,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 36.97334674797944,
						"perplexity_stderr,none": 2.066539707429997
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7391810595769455,
						"acc_stderr,none": 0.006117261570238603,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.39496405034886,
						"perplexity_stderr,none": 0.0671587240188186
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4519697263729866,
						"acc_stderr,none": 0.006933763441941934,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.328273768980587,
						"perplexity_stderr,none": 1.4382419168377725
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5394915583155443,
						"acc_stderr,none": 0.006944215621241691,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 17.40381539167411,
						"perplexity_stderr,none": 0.8453637205180871
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5037842033766737,
						"acc_stderr,none": 0.006965778146015217,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 23.00522964779474,
						"perplexity_stderr,none": 1.2248071138198806
					},
					"lambada_standard": {
						"acc,none": 0.6774694352804191,
						"acc_stderr,none": 0.0065124194470117065,
						"alias": " - lambada_standard",
						"perplexity,none": 4.339762818631795,
						"perplexity_stderr,none": 0.09289649963139507
					},
					"logiqa": {
						"acc,none": 0.2119815668202765,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825194,
						"acc_stderr,none": 0.01603099796061939,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3023785785500641,
						"acc_stderr,none": 0.04835170606668173,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.29605263157894735,
						"acc_stderr,none": 0.03715062154998904,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3584905660377358,
						"acc_stderr,none": 0.02951470358398177,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3352601156069364,
						"acc_stderr,none": 0.03599586301247077,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.043898699568087785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542129,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.039994238792813344,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.036951833116502325,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24867724867724866,
						"acc_stderr,none": 0.022261817692400158,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.04104947269903394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4064516129032258,
						"acc_stderr,none": 0.02794172734625631,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.031089826002937523,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3151515151515151,
						"acc_stderr,none": 0.0362773057502241,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3787878787878788,
						"acc_stderr,none": 0.03456088731993747,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.43005181347150256,
						"acc_stderr,none": 0.03572954333144808,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.022815813098896603,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959316,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.27310924369747897,
						"acc_stderr,none": 0.02894200404099817,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3174311926605505,
						"acc_stderr,none": 0.0199571521984605,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.027467401804058014,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.35784313725490197,
						"acc_stderr,none": 0.033644872860882996,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.33755274261603374,
						"acc_stderr,none": 0.030781549102026216,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.35874439461883406,
						"acc_stderr,none": 0.03219079200419995,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.33587786259541985,
						"acc_stderr,none": 0.041423137719966634,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.29032943676939427,
						"acc_stderr,none": 0.03805569615252467,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.32231404958677684,
						"acc_stderr,none": 0.04266416363352167,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.043733130409147614,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3128834355828221,
						"acc_stderr,none": 0.036429145782924055,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467763,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.3786407766990291,
						"acc_stderr,none": 0.04802694698258974,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.358974358974359,
						"acc_stderr,none": 0.03142616993791923,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.01726860756000578,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2976878612716763,
						"acc_stderr,none": 0.024617055388676996,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2324022346368715,
						"acc_stderr,none": 0.014125968754673403,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.02617390850671858,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3334406179594464,
						"acc_stderr,none": 0.041516662231788515,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3858520900321543,
						"acc_stderr,none": 0.02764814959975147,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.02563082497562134,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.026577860943307857,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.26597131681877445,
						"acc_stderr,none": 0.011285033165551284,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.2867647058823529,
						"acc_stderr,none": 0.027472274473233818,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.29901960784313725,
						"acc_stderr,none": 0.018521756215423024,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.04607582090719976,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24489795918367346,
						"acc_stderr,none": 0.027529637440174917,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3132921676958076,
						"acc_stderr,none": 0.04924841620676432,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.39303482587064675,
						"acc_stderr,none": 0.0345368246603156,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2790992705359975,
						"acc_stderr,none": 0.05689333902833598,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.27710843373493976,
						"acc_stderr,none": 0.034843315926805875,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3742690058479532,
						"acc_stderr,none": 0.03711601185389481,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.332348446255731,
						"acc_stderr,none": 0.004754980404968236,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3336045565500407,
						"acc_stderr,none": 0.004755357239978964,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7524509803921569,
						"acc_stderr,none": 0.021393040183721096,
						"alias": " - mrpc",
						"f1,none": 0.8330578512396695,
						"f1_stderr,none": 0.0164269491339398
					},
					"openbookqa": {
						"acc,none": 0.308,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.022064943313928866,
						"acc_stderr,none": 0.0206670329874661,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.434,
						"acc_stderr,none": 0.011085280407858916,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.010660972196009384,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.438,
						"acc_stderr,none": 0.011096827014281873,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943093,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.011166819105029991,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5165,
						"acc_stderr,none": 0.011177045144808303,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48078571428571426,
						"acc_stderr,none": 0.058930034539571734,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7698585418933623,
						"acc_norm,none": 0.7769314472252449,
						"acc_norm_stderr,none": 0.009713057213018529,
						"acc_stderr,none": 0.00982083282683982,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7237715759751767,
						"acc_norm,none": 0.6328976518349325,
						"acc_norm_stderr,none": 0.01019725311514425,
						"acc_stderr,none": 0.1586484521327461,
						"alias": "pythia",
						"bits_per_byte,none": 0.6348088693273369,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527320278168593,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.393646412338403,
						"perplexity_stderr,none": 0.06715986465283244,
						"word_perplexity,none": 10.516692141293987,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.504484715357862,
						"acc_stderr,none": 0.006765138405338169,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.5854316101904526,
						"acc_stderr,none": 0.0024501330968434614,
						"alias": " - qqp",
						"f1,none": 0.6335031596440207,
						"f1_stderr,none": 0.002636374625321541
					},
					"record": {
						"alias": "record",
						"em,none": 0.2793,
						"em_stderr,none": 0.004486776584187263,
						"f1,none": 0.28924190500676633,
						"f1_stderr,none": 0.004494914897409621
					},
					"rte": {
						"acc,none": 0.6642599277978339,
						"acc_stderr,none": 0.028426021205818792,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.928,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.007274401481697047,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8830275229357798,
						"acc_stderr,none": 0.010889799557978598,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.30592439921042985,
						"acc_stderr,none": 0.001462954312143612,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.880744587392929,
						"bleu_diff_stderr,none": 0.8678812004118877,
						"bleu_max,none": 26.41035806044772,
						"bleu_max_stderr,none": 0.8053031354882549,
						"rouge1_acc,none": 0.26805385556915545,
						"rouge1_acc_stderr,none": 0.015506204722834543,
						"rouge1_diff,none": -11.396445439036393,
						"rouge1_diff_stderr,none": 0.9230098266135631,
						"rouge1_max,none": 51.40550320621949,
						"rouge1_max_stderr,none": 0.8807432651825933,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041845,
						"rouge2_diff,none": -13.823272558593755,
						"rouge2_diff_stderr,none": 1.116589349674441,
						"rouge2_max,none": 35.0485157136745,
						"rouge2_max_stderr,none": 1.0318182579197597,
						"rougeL_acc,none": 0.27539779681762544,
						"rougeL_acc_stderr,none": 0.015638135667775523,
						"rougeL_diff,none": -11.714249444591902,
						"rougeL_diff_stderr,none": 0.9328779894414138,
						"rougeL_max,none": 48.53499046515222,
						"rougeL_max_stderr,none": 0.8998293381798167
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3047735618115055,
						"bleu_acc_stderr,none": 0.016114124156882452,
						"bleu_diff,none": -8.880744587392929,
						"bleu_diff_stderr,none": 0.8678812004118877,
						"bleu_max,none": 26.41035806044772,
						"bleu_max_stderr,none": 0.8053031354882549,
						"rouge1_acc,none": 0.26805385556915545,
						"rouge1_acc_stderr,none": 0.015506204722834543,
						"rouge1_diff,none": -11.396445439036393,
						"rouge1_diff_stderr,none": 0.9230098266135631,
						"rouge1_max,none": 51.40550320621949,
						"rouge1_max_stderr,none": 0.8807432651825933,
						"rouge2_acc,none": 0.23745410036719705,
						"rouge2_acc_stderr,none": 0.014896277441041845,
						"rouge2_diff,none": -13.823272558593755,
						"rouge2_diff_stderr,none": 1.116589349674441,
						"rouge2_max,none": 35.0485157136745,
						"rouge2_max_stderr,none": 1.0318182579197597,
						"rougeL_acc,none": 0.27539779681762544,
						"rougeL_acc_stderr,none": 0.015638135667775523,
						"rougeL_diff,none": -11.714249444591902,
						"rougeL_diff_stderr,none": 0.9328779894414138,
						"rougeL_max,none": 48.53499046515222,
						"rougeL_max_stderr,none": 0.8998293381798167
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2350061199510404,
						"acc_stderr,none": 0.014843061507731604,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.37684267846981934,
						"acc_stderr,none": 0.013769607015078787,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6348088693273369,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5527320278168593,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.516692141293987,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6724546172059984,
						"acc_stderr,none": 0.013190169546797017,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.0492300107297805,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.621090909090909,
						"acc_stderr,none": 0.07255915696878039,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.01978055967565549,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587574,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.021564276850201614,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.020667032987466104,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43544846050870145,
						"acc_stderr,none": 0.0486267655398956,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.00947697684977859,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4718875502008032,
						"acc_stderr,none": 0.010006219242553597,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829971,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.009779967579941793,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5277108433734939,
						"acc_stderr,none": 0.010006669313970325,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4955823293172691,
						"acc_stderr,none": 0.01002168168176935,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4955823293172691,
						"acc_stderr,none": 0.010021681681769354,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43132530120481927,
						"acc_stderr,none": 0.009927090290379253,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.493574297188755,
						"acc_stderr,none": 0.01002124521715939,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.00976532683221899,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41686746987951806,
						"acc_stderr,none": 0.009882576606533236,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45220883534136547,
						"acc_stderr,none": 0.009976187086803718,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.4036144578313253,
						"acc_stderr,none": 0.009834096424955387,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.009819585875881304,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806036,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6209012694783707,
						"acc_stderr,none": 0.0626235906973835,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5896757114493713,
						"acc_stderr,none": 0.01265848580066339,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.771674387822634,
						"acc_stderr,none": 0.010802042577302282,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7048312375909993,
						"acc_stderr,none": 0.01173786999944211,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703514,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.01216884022167803,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5188616810059563,
						"acc_stderr,none": 0.01285796676246499,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6730641958967571,
						"acc_stderr,none": 0.012071771683911358,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189277,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5876902713434812,
						"acc_stderr,none": 0.01266769412239704,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6128391793514228,
						"acc_stderr,none": 0.01253517751106737,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8100696785794561,
						"acc_stderr,none": 0.03671883129234059,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.006983463551504551,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7466110531803962,
						"acc_stderr,none": 0.014052651829226397,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7604562737642585,
						"acc_stderr,none": 0.02636810251019086,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6698412698412698,
						"acc_stderr,none": 0.026538875646287714,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7817460317460317,
						"acc_stderr,none": 0.0184174680241397,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "./rwkv-x-dev/rwkv-230_pth"
	},
	"EleutherAI/gpt-j-6b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5617249154453213,
						"acc_norm,none": 0.5374859075535513,
						"acc_norm_stderr,none": 0.040835912079200204,
						"acc_stderr,none": 0.052744868547689255,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3390625,
						"acc_stderr,none": 0.015781225300437456,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.08315,
						"acc_stderr,none": 0.07025814025631104,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8271343283582089,
						"acc_stderr,none": 0.16272667452524073,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.22808320950965824,
						"acc_norm,none": 0.22808320950965824,
						"acc_norm_stderr,none": 0.11290759176414779,
						"acc_stderr,none": 0.11290759176414779,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2528924192712831,
						"acc_norm,none": 0.2528924192712831,
						"acc_norm_stderr,none": 0.0358206815997799,
						"acc_stderr,none": 0.0358206815997799,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.485014786944904,
						"likelihood_diff_stderr,none": 0.4320929646472229,
						"pct_stereotype,none": 0.5992844364937389,
						"pct_stereotype_stderr,none": 0.08086673061807712
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"glue": {
						"acc,none": 0.4550234402553109,
						"acc_stderr,none": 0.04519394744416942,
						"alias": "glue",
						"f1,none": 0.3898040816613386,
						"f1_stderr,none": 0.0017209972173419788,
						"mcc,none": -0.020806089559943154,
						"mcc_stderr,none": 0.0008706706445907582
					},
					"kmmlu": {
						"acc,none": 0.17323130233901238,
						"acc_norm,none": 0.17323130233901238,
						"acc_norm_stderr,none": 0.038339168613010705,
						"acc_stderr,none": 0.038339168613010705,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4748958561718921,
						"acc_norm,none": 0.46,
						"acc_norm_stderr,none": 0.0004977955911823682,
						"acc_stderr,none": 0.03735955247783563,
						"alias": "kobest",
						"f1,none": 0.3779207166846693,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6436056666019794,
						"acc_stderr,none": 0.01819372577834859,
						"alias": "lambada",
						"perplexity,none": 4.906969500428236,
						"perplexity_stderr,none": 0.4132432723416763
					},
					"lambada_cloze": {
						"acc,none": 0.022705220260042694,
						"acc_stderr,none": 0.0027754439771733607,
						"alias": "lambada_cloze",
						"perplexity,none": 680.4368331829941,
						"perplexity_stderr,none": 143.25461308547062
					},
					"lambada_multilingual": {
						"acc,none": 0.41113914224723463,
						"acc_stderr,none": 0.07862437691428391,
						"alias": "lambada_multilingual",
						"perplexity,none": 61.743281543850046,
						"perplexity_stderr,none": 18.81227418401626
					},
					"mmlu": {
						"acc,none": 0.2648483122062384,
						"acc_stderr,none": 0.04270043969823963,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2563230605738576,
						"acc_stderr,none": 0.03386786690089871,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.28355326681686505,
						"acc_stderr,none": 0.04517689484067604,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2642183945401364,
						"acc_stderr,none": 0.040441227184438336,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.25975261655566123,
						"acc_stderr,none": 0.05060138212955545,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3043293115684883,
						"acc_norm,none": 0.28805039976427615,
						"acc_norm_stderr,none": 0.00012737882966735257,
						"acc_stderr,none": 0.07337574198795123,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.47364285714285714,
						"acc_stderr,none": 0.049622775906176066,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7178123074613596,
						"acc_norm,none": 0.5440739814268382,
						"acc_norm_stderr,none": 0.004232947643798331,
						"acc_stderr,none": 0.15395298084443274,
						"alias": "pythia",
						"bits_per_byte,none": 0.6441185953327017,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.562784222210745,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.109855514494685,
						"perplexity_stderr,none": 0.0884849781693152,
						"word_perplexity,none": 10.88592724737089,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41134751773049644,
						"acc_norm,none": 0.450354609929078,
						"acc_norm_stderr,none": 0.04844321154297932,
						"acc_stderr,none": 0.03671293143688081,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5660710126118931,
						"acc_stderr,none": 0.03652281469368929,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.30808319703653325,
						"acc_stderr,none": 0.05076102004868316,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.00025729292329675844,
						"bleu_diff,none": -6.667858182648117,
						"bleu_diff_stderr,none": 0.5945693598660794,
						"bleu_max,none": 22.668450148573267,
						"bleu_max_stderr,none": 0.5419271742300632,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.00023475821985345906,
						"rouge1_diff,none": -10.019734436908287,
						"rouge1_diff_stderr,none": 0.8410534379202882,
						"rouge1_max,none": 45.737723487827154,
						"rouge1_max_stderr,none": 0.8122598704941005,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.00019929461127346601,
						"rouge2_diff,none": -10.871699444365984,
						"rouge2_diff_stderr,none": 1.0803246765266343,
						"rouge2_max,none": 29.424974644198702,
						"rouge2_max_stderr,none": 0.9969156641384315,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.000228839050560548,
						"rougeL_diff,none": -10.186938529736118,
						"rougeL_diff_stderr,none": 0.8605854577587904,
						"rougeL_max,none": 43.12716790628842,
						"rougeL_max_stderr,none": 0.8138546469257434
					},
					"xcopa": {
						"acc,none": 0.5441818181818182,
						"acc_stderr,none": 0.03440380430176429,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4138688085676038,
						"acc_stderr,none": 0.05358123052295437,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5490042717044703,
						"acc_stderr,none": 0.057401991595252236,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7424140256237357,
						"acc_stderr,none": 0.06344495078776032,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5617249154453213,
						"acc_norm,none": 0.5374859075535513,
						"acc_norm_stderr,none": 0.040835912079200204,
						"acc_stderr,none": 0.052744868547689255,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3390625,
						"acc_stderr,none": 0.015781225300437456,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.325,
						"acc_stderr,none": 0.014818724459095522,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.337,
						"acc_stderr,none": 0.014955087918653607,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3525,
						"acc_stderr,none": 0.013797164918918367,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3395904436860068,
						"acc_norm,none": 0.36860068259385664,
						"acc_norm_stderr,none": 0.014097810678042192,
						"acc_stderr,none": 0.013839039762820167,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6712962962962963,
						"acc_norm,none": 0.6207912457912458,
						"acc_norm_stderr,none": 0.009955891668865569,
						"acc_stderr,none": 0.009638903167022171,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.08315,
						"acc_stderr,none": 0.07025814025631104,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.088,
						"acc_stderr,none": 0.00633625078709952,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.2435,
						"acc_stderr,none": 0.009599476546926207,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.138,
						"acc_stderr,none": 0.00771412690308757,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.217,
						"acc_stderr,none": 0.009219435937165715,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0845,
						"acc_stderr,none": 0.0062208700848278824,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.047,
						"acc_stderr,none": 0.004733571944280044,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.007,
						"acc_stderr,none": 0.0018647355360237453,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.006,
						"acc_stderr,none": 0.0017272787111155075,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000127,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0021691973969631237,
						"acc_stderr,none": 0.0009692521054558677,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8271343283582089,
						"acc_stderr,none": 0.16272667452524073,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523734,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844881,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103761,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032140007,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968777,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.0157633906404837,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364478,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747401,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000003,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462438024,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813615,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584935,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427421,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942307,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832015,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611448,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938553,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113472,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727186,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118587,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477828,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.393,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037188,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.01286407728849934,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.651,
						"acc_stderr,none": 0.015080663991563098,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763907,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163042,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333347,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.014566646394664392,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785133,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.015663503610155283,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405749,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.015782683329937625,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.01549319331316291,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499347,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323494,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000033,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.423,
						"acc_stderr,none": 0.015630589090476342,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244049,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998435,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881447,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.389,
						"acc_stderr,none": 0.015424555647308488,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397224,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695798,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.015583544104177524,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653893,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.01028132801274739,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230187,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357801,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345703,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611466,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.015524980677122583,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.363,
						"acc_stderr,none": 0.015213890444671287,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6568807339449542,
						"acc_stderr,none": 0.008303445777655944,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.06460957383809221,
						"alias": "cb",
						"f1,none": 0.25801910507792863,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.22808320950965824,
						"acc_norm,none": 0.22808320950965824,
						"acc_norm_stderr,none": 0.11290759176414779,
						"acc_stderr,none": 0.11290759176414779,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996391,
						"acc_stderr,none": 0.08081046758996391,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.06242676343682882,
						"acc_stderr,none": 0.06242676343682882,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.06060606060606061,
						"acc_stderr,none": 0.06060606060606061,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777509,
						"acc_stderr,none": 0.06120537406777509,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.0,
						"acc_norm,none": 0.0,
						"acc_norm_stderr,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.0,
						"acc_norm,none": 0.0,
						"acc_norm_stderr,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0652050663696626,
						"acc_stderr,none": 0.0652050663696626,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.061487546190134544,
						"acc_stderr,none": 0.061487546190134544,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2528924192712831,
						"acc_norm,none": 0.2528924192712831,
						"acc_norm_stderr,none": 0.0358206815997799,
						"acc_stderr,none": 0.0358206815997799,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018759,
						"acc_stderr,none": 0.03489370652018759,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.03016631629884799,
						"acc_stderr,none": 0.03016631629884799,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693254,
						"acc_stderr,none": 0.030778554678693254,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2569832402234637,
						"acc_norm,none": 0.2569832402234637,
						"acc_norm_stderr,none": 0.03275229252356165,
						"acc_stderr,none": 0.03275229252356165,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036402,
						"acc_stderr,none": 0.027985699387036402,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.025825054502221043,
						"acc_stderr,none": 0.025825054502221043,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896056,
						"acc_stderr,none": 0.03653847510896056,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.033616702408095465,
						"acc_stderr,none": 0.033616702408095465,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890495,
						"acc_stderr,none": 0.03361101403890495,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.03074630074212451,
						"acc_stderr,none": 0.03074630074212451,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998167,
						"acc_stderr,none": 0.028942004040998167,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949517,
						"acc_stderr,none": 0.029322764228949517,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.03633384414073462,
						"acc_stderr,none": 0.03633384414073462,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.0357250214181557,
						"acc_stderr,none": 0.0357250214181557,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.033213825516355905,
						"acc_stderr,none": 0.033213825516355905,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.22818791946308725,
						"acc_norm,none": 0.22818791946308725,
						"acc_norm_stderr,none": 0.03449619964127221,
						"acc_stderr,none": 0.03449619964127221,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.039803298549204315,
						"acc_stderr,none": 0.039803298549204315,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.037184890068181146,
						"acc_stderr,none": 0.037184890068181146,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24574209245742093,
						"acc_norm,none": 0.24574209245742093,
						"acc_norm_stderr,none": 0.021262179663182224,
						"acc_stderr,none": 0.021262179663182224,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2570093457943925,
						"acc_norm,none": 0.2570093457943925,
						"acc_norm_stderr,none": 0.029941691533244642,
						"acc_stderr,none": 0.029941691533244642,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.029661370413965837,
						"acc_stderr,none": 0.029661370413965837,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25555555555555554,
						"acc_norm,none": 0.25555555555555554,
						"acc_norm_stderr,none": 0.03260110304027645,
						"acc_stderr,none": 0.03260110304027645,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03174603174603176,
						"acc_stderr,none": 0.03174603174603176,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495736,
						"acc_stderr,none": 0.040832215386495736,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135303,
						"acc_stderr,none": 0.03565998174135303,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.27155172413793105,
						"acc_norm,none": 0.27155172413793105,
						"acc_norm_stderr,none": 0.029263054233931916,
						"acc_stderr,none": 0.029263054233931916,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.032794240385439676,
						"acc_stderr,none": 0.032794240385439676,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.02911849599823729,
						"acc_stderr,none": 0.02911849599823729,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.03209281645145386,
						"acc_stderr,none": 0.03209281645145386,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03250593287417368,
						"acc_stderr,none": 0.03250593287417368,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.020806089559943154,
						"mcc_stderr,none": 0.029507128708004753
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0348735088019777,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.485014786944904,
						"likelihood_diff_stderr,none": 0.4320929646472229,
						"pct_stereotype,none": 0.5992844364937389,
						"pct_stereotype_stderr,none": 0.08086673061807712
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.542758510591874,
						"likelihood_diff_stderr,none": 0.08720718850711012,
						"pct_stereotype,none": 0.6547406082289803,
						"pct_stereotype_stderr,none": 0.01161369408556993
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.8648602831494676,
						"likelihood_diff_stderr,none": 0.3910878170741051,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831278
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.1039628115567295,
						"likelihood_diff_stderr,none": 1.6017034884242818,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.419645309448242,
						"likelihood_diff_stderr,none": 0.6485312414629641,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.05685286730420954
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.6587774217128755,
						"likelihood_diff_stderr,none": 0.18155260658016478,
						"pct_stereotype,none": 0.659375,
						"pct_stereotype_stderr,none": 0.0265343929755315
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.425131841942116,
						"likelihood_diff_stderr,none": 0.2253731340382056,
						"pct_stereotype,none": 0.5648148148148148,
						"pct_stereotype_stderr,none": 0.033812000056435254
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.779647774166531,
						"likelihood_diff_stderr,none": 0.35440412458382303,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3010505466010627,
						"likelihood_diff_stderr,none": 0.1472038932777811,
						"pct_stereotype,none": 0.5787401574803149,
						"pct_stereotype_stderr,none": 0.021928698676414303
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.8243303384866802,
						"likelihood_diff_stderr,none": 0.3418359865930741,
						"pct_stereotype,none": 0.8378378378378378,
						"pct_stereotype_stderr,none": 0.03514458387408102
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.322493747998309,
						"likelihood_diff_stderr,none": 0.4305921708829394,
						"pct_stereotype,none": 0.8279569892473119,
						"pct_stereotype_stderr,none": 0.03934852812061865
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.94827130970202,
						"likelihood_diff_stderr,none": 0.24621294001141578,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.4283171342680263,
						"likelihood_diff_stderr,none": 0.08011675161360697,
						"pct_stereotype,none": 0.5438282647584973,
						"pct_stereotype_stderr,none": 0.012166287275376286
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 2.964279513888889,
						"likelihood_diff_stderr,none": 0.3026891961020988,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05267171812666418
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.1190584622896633,
						"likelihood_diff_stderr,none": 0.5923000376920594,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.664484024047852,
						"likelihood_diff_stderr,none": 0.4240312297896494,
						"pct_stereotype,none": 0.6515151515151515,
						"pct_stereotype_stderr,none": 0.059101367791192905
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0914496484203875,
						"likelihood_diff_stderr,none": 0.17075163838359492,
						"pct_stereotype,none": 0.5420560747663551,
						"pct_stereotype_stderr,none": 0.027851800131188018
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.8594471023016768,
						"likelihood_diff_stderr,none": 0.1956632349019217,
						"pct_stereotype,none": 0.383399209486166,
						"pct_stereotype_stderr,none": 0.030628616122857773
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.497151427798801,
						"likelihood_diff_stderr,none": 0.45419973118301726,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.2243352143660835,
						"likelihood_diff_stderr,none": 0.15332470163279413,
						"pct_stereotype,none": 0.44130434782608696,
						"pct_stereotype_stderr,none": 0.02317663632830031
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3678014340608016,
						"likelihood_diff_stderr,none": 0.3050900562257792,
						"pct_stereotype,none": 0.6869565217391305,
						"pct_stereotype_stderr,none": 0.043432470166108225
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.0681575314029232,
						"likelihood_diff_stderr,none": 0.27727149432621345,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497395
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8776524991405252,
						"likelihood_diff_stderr,none": 0.2760528931060364,
						"pct_stereotype,none": 0.7448979591836735,
						"pct_stereotype_stderr,none": 0.031216776356482227
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"glue": {
						"acc,none": 0.4550234402553109,
						"acc_stderr,none": 0.04519394744416942,
						"alias": "glue",
						"f1,none": 0.3898040816613386,
						"f1_stderr,none": 0.0017209972173419788,
						"mcc,none": -0.020806089559943154,
						"mcc_stderr,none": 0.0008706706445907582
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.030326004548900682,
						"exact_match_stderr,get-answer": 0.0047234874655147484
					},
					"hellaswag": {
						"acc,none": 0.4955188209520016,
						"acc_norm,none": 0.6625174268074089,
						"acc_norm_stderr,none": 0.004718846448021783,
						"acc_stderr,none": 0.004989581008163193,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.17323130233901238,
						"acc_norm,none": 0.17323130233901238,
						"acc_norm_stderr,none": 0.038339168613010705,
						"acc_stderr,none": 0.038339168613010705,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.154,
						"acc_norm,none": 0.154,
						"acc_norm_stderr,none": 0.011419913065098684,
						"acc_stderr,none": 0.011419913065098684,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.165,
						"acc_norm,none": 0.165,
						"acc_norm_stderr,none": 0.011743632866916157,
						"acc_stderr,none": 0.011743632866916157,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.217,
						"acc_norm,none": 0.217,
						"acc_norm_stderr,none": 0.01304151375727071,
						"acc_stderr,none": 0.01304151375727071,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936443,
						"acc_stderr,none": 0.013334797216936443,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.017692419360790187,
						"acc_stderr,none": 0.017692419360790187,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528022,
						"acc_stderr,none": 0.010709373963528022,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.116,
						"acc_norm,none": 0.116,
						"acc_norm_stderr,none": 0.010131468138756997,
						"acc_stderr,none": 0.010131468138756997,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.01110798754893915,
						"acc_stderr,none": 0.01110798754893915,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.099,
						"acc_norm,none": 0.099,
						"acc_norm_stderr,none": 0.009449248027662779,
						"acc_stderr,none": 0.009449248027662779,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.35384615384615387,
						"acc_norm,none": 0.35384615384615387,
						"acc_norm_stderr,none": 0.04209983089826262,
						"acc_stderr,none": 0.04209983089826262,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370094,
						"acc_stderr,none": 0.012486268734370094,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.128,
						"acc_norm,none": 0.128,
						"acc_norm_stderr,none": 0.010570133761108663,
						"acc_stderr,none": 0.010570133761108663,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.202,
						"acc_norm,none": 0.202,
						"acc_norm_stderr,none": 0.01270265158765513,
						"acc_stderr,none": 0.01270265158765513,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.159,
						"acc_norm,none": 0.159,
						"acc_norm_stderr,none": 0.011569479368271298,
						"acc_stderr,none": 0.011569479368271298,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.174,
						"acc_norm,none": 0.174,
						"acc_norm_stderr,none": 0.01199449323097343,
						"acc_stderr,none": 0.01199449323097343,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264595,
						"acc_stderr,none": 0.011912216456264595,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.199,
						"acc_norm,none": 0.199,
						"acc_norm_stderr,none": 0.012631649083099186,
						"acc_stderr,none": 0.012631649083099186,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.01220758063766216,
						"acc_stderr,none": 0.01220758063766216,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816505,
						"acc_stderr,none": 0.04229525846816505,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.133,
						"acc_norm,none": 0.133,
						"acc_norm_stderr,none": 0.010743669132397344,
						"acc_stderr,none": 0.010743669132397344,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.134,
						"acc_norm,none": 0.134,
						"acc_norm_stderr,none": 0.010777762298369683,
						"acc_stderr,none": 0.010777762298369683,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333315,
						"acc_stderr,none": 0.010878848714333315,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660009,
						"acc_stderr,none": 0.013394902889660009,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.191,
						"acc_norm,none": 0.191,
						"acc_norm_stderr,none": 0.012436787112179479,
						"acc_stderr,none": 0.012436787112179479,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.189,
						"acc_norm,none": 0.189,
						"acc_norm_stderr,none": 0.012386784588117717,
						"acc_stderr,none": 0.012386784588117717,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.20333333333333334,
						"acc_norm,none": 0.20333333333333334,
						"acc_norm_stderr,none": 0.01644482294881425,
						"acc_stderr,none": 0.01644482294881425,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.115,
						"acc_norm,none": 0.115,
						"acc_norm_stderr,none": 0.010093407594904612,
						"acc_stderr,none": 0.010093407594904612,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.0119122164562646,
						"acc_stderr,none": 0.0119122164562646,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.012233587399477823,
						"acc_stderr,none": 0.012233587399477823,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.159,
						"acc_norm,none": 0.159,
						"acc_norm_stderr,none": 0.0115694793682713,
						"acc_stderr,none": 0.0115694793682713,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920835,
						"acc_stderr,none": 0.013512312258920835,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.147,
						"acc_norm,none": 0.147,
						"acc_norm_stderr,none": 0.01120341539516033,
						"acc_stderr,none": 0.01120341539516033,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.206,
						"acc_norm,none": 0.206,
						"acc_norm_stderr,none": 0.01279561361278655,
						"acc_stderr,none": 0.01279561361278655,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.026935153843310695,
						"acc_stderr,none": 0.026935153843310695,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.01265543994336667,
						"acc_stderr,none": 0.01265543994336667,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541669,
						"acc_stderr,none": 0.011884495834541669,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.126,
						"acc_norm,none": 0.126,
						"acc_norm_stderr,none": 0.01049924922240802,
						"acc_stderr,none": 0.01049924922240802,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4748958561718921,
						"acc_norm,none": 0.46,
						"acc_norm_stderr,none": 0.0004977955911823682,
						"acc_stderr,none": 0.03735955247783563,
						"alias": "kobest",
						"f1,none": 0.3779207166846693,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.015819015179246724,
						"alias": " - kobest_copa",
						"f1,none": 0.5014920133403546,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.35,
						"acc_norm,none": 0.46,
						"acc_norm_stderr,none": 0.022311333245289673,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3474208341335274,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.42317380352644834,
						"acc_stderr,none": 0.024827573845811267,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4178401050171293,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6436056666019794,
						"acc_stderr,none": 0.01819372577834859,
						"alias": "lambada",
						"perplexity,none": 4.906969500428236,
						"perplexity_stderr,none": 0.4132432723416763
					},
					"lambada_cloze": {
						"acc,none": 0.022705220260042694,
						"acc_stderr,none": 0.0027754439771733607,
						"alias": "lambada_cloze",
						"perplexity,none": 680.4368331829941,
						"perplexity_stderr,none": 143.25461308547062
					},
					"lambada_multilingual": {
						"acc,none": 0.41113914224723463,
						"acc_stderr,none": 0.07862437691428391,
						"alias": "lambada_multilingual",
						"perplexity,none": 61.743281543850046,
						"perplexity_stderr,none": 18.81227418401626
					},
					"lambada_openai": {
						"acc,none": 0.6778575587036678,
						"acc_stderr,none": 0.0065103639427392754,
						"alias": " - lambada_openai",
						"perplexity,none": 4.109855514494685,
						"perplexity_stderr,none": 0.0884849781693152
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.01901804773918106,
						"acc_stderr,none": 0.001902941985094661,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 399.93172688281,
						"perplexity_stderr,none": 14.278572741063675
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3120512322918688,
						"acc_stderr,none": 0.0064551014528429025,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 82.2136982560527,
						"perplexity_stderr,none": 4.894409420012391
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6768872501455463,
						"acc_stderr,none": 0.0065154930732499675,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.1118154240616205,
						"perplexity_stderr,none": 0.0885830489704497
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.32641179895206673,
						"acc_stderr,none": 0.006532692754359019,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 83.800381643966,
						"perplexity_stderr,none": 4.58944244709407
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4071414709877741,
						"acc_stderr,none": 0.006844792382678513,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 51.780202674131694,
						"perplexity_stderr,none": 2.9022534629555268
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.3332039588589171,
						"acc_stderr,none": 0.0065669491818204535,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 86.8103097210382,
						"perplexity_stderr,none": 5.18405231797384
					},
					"lambada_standard": {
						"acc,none": 0.6097418979235397,
						"acc_stderr,none": 0.0067961202715497195,
						"alias": " - lambada_standard",
						"perplexity,none": 5.7016801655756595,
						"perplexity_stderr,none": 0.13375347996543016
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.026392392780904328,
						"acc_stderr,none": 0.0022332813288584693,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 960.941939483178,
						"perplexity_stderr,none": 38.661097810383936
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.28435114503816794,
						"exact_match_stderr,get-answer": 0.01138123494258704
					},
					"logiqa": {
						"acc,none": 0.2119815668202765,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971922,
						"acc_stderr,none": 0.0160309979606194,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23155216284987276,
						"acc_norm,none": 0.27544529262086515,
						"acc_norm_stderr,none": 0.011271070752009223,
						"acc_stderr,none": 0.010642496713710913,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2633165829145729,
						"acc_norm,none": 0.26767169179229483,
						"acc_norm_stderr,none": 0.008105031808599693,
						"acc_stderr,none": 0.00806269335609449,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.40086846007201865,
						"acc_stderr,none": 0.005043744010958537,
						"alias": "mc_taco",
						"f1,none": 0.5072728856371396,
						"f1_stderr,none": 0.005712532989740299
					},
					"medmcqa": {
						"acc,none": 0.3019364092756395,
						"acc_norm,none": 0.3019364092756395,
						"acc_norm_stderr,none": 0.007099262293691155,
						"acc_stderr,none": 0.007099262293691155,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.26394344069128045,
						"acc_norm,none": 0.26394344069128045,
						"acc_norm_stderr,none": 0.012358548743674917,
						"acc_stderr,none": 0.012358548743674917,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2648483122062384,
						"acc_stderr,none": 0.04270043969823963,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.03673731683969506,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.25660377358490566,
						"acc_stderr,none": 0.02688064788905197,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03476590104304134,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2254335260115607,
						"acc_stderr,none": 0.031862098516411426,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.03793281185307809,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610334,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3448275862068966,
						"acc_stderr,none": 0.03960933549451207,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.022930973071633342,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.23015873015873015,
						"acc_stderr,none": 0.03764950879790606,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24838709677419354,
						"acc_stderr,none": 0.024580028921481,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.031270907132976984,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.21212121212121213,
						"acc_stderr,none": 0.03192271569548298,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.17676767676767677,
						"acc_stderr,none": 0.027178752639044915,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.25906735751295334,
						"acc_stderr,none": 0.031618779179354094,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.0228158130988966,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.02646611753895991,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3025210084033613,
						"acc_stderr,none": 0.029837962388291926,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2251655629139073,
						"acc_stderr,none": 0.03410435282008936,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23669724770642203,
						"acc_stderr,none": 0.01822407811729908,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18981481481481483,
						"acc_stderr,none": 0.026744714834691926,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.028867431449849313,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29535864978902954,
						"acc_stderr,none": 0.029696338713422882,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484504,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.31297709923664124,
						"acc_stderr,none": 0.04066962905677697,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2563230605738576,
						"acc_stderr,none": 0.03386786690089871,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.23140495867768596,
						"acc_stderr,none": 0.0384985609879409,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04186091791394607,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2085889570552147,
						"acc_stderr,none": 0.03192193448934722,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3482142857142857,
						"acc_stderr,none": 0.04521829902833585,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2524271844660194,
						"acc_stderr,none": 0.04301250399690877,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3034188034188034,
						"acc_stderr,none": 0.030118210106942652,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.27330779054916987,
						"acc_stderr,none": 0.01593668106262856,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.28034682080924855,
						"acc_stderr,none": 0.024182427496577605,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3202614379084967,
						"acc_stderr,none": 0.02671611838015684,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.28355326681686505,
						"acc_stderr,none": 0.04517689484067604,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.21864951768488747,
						"acc_stderr,none": 0.023475581417861106,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2932098765432099,
						"acc_stderr,none": 0.025329888171900926,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2646675358539765,
						"acc_stderr,none": 0.011267332992845528,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.024562204314142314,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.017848089574913226,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2636363636363636,
						"acc_stderr,none": 0.04220224692971987,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.27755102040816326,
						"acc_stderr,none": 0.028666857790274645,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2642183945401364,
						"acc_stderr,none": 0.040441227184438336,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2736318407960199,
						"acc_stderr,none": 0.031524391865554016,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.25975261655566123,
						"acc_stderr,none": 0.05060138212955545,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.03599335771456027,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.37677024961793176,
						"acc_stderr,none": 0.004891469646507829,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37886493083807976,
						"acc_stderr,none": 0.004892562481455205,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.02304833666842021,
						"alias": "mrpc",
						"f1,none": 0.8122270742358079,
						"f1_stderr,none": 0.016218335300780515
					},
					"multimedqa": {
						"acc,none": 0.3043293115684883,
						"acc_norm,none": 0.28805039976427615,
						"acc_norm_stderr,none": 0.00012737882966735257,
						"acc_stderr,none": 0.07337574198795123,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.540016501650165,
						"acc_stderr,none": 0.007158765420296098,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6816215215768018,
						"mrr_stderr,none": 0.010312738938315686,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43792325056433407,
						"r@2_stderr,none": 0.016677278334075056
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6418359687984931,
						"mrr_stderr,none": 0.010428400520755669,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.46275395033860045,
						"r@2_stderr,none": 0.016760618753481935
					},
					"openbookqa": {
						"acc,none": 0.288,
						"acc_norm,none": 0.384,
						"acc_norm_stderr,none": 0.0217723694655472,
						"acc_stderr,none": 0.020271503835075217,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.427,
						"acc_stderr,none": 0.0110633041334482,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.369,
						"acc_stderr,none": 0.010792485296234448,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.411,
						"acc_stderr,none": 0.011004546788714929,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.011139750761283318,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5395,
						"acc_stderr,none": 0.011148184426533285,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.01116162133811447,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.01118233080628221,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47364285714285714,
						"acc_stderr,none": 0.049622775906176066,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7529923830250272,
						"acc_norm,none": 0.7616974972796517,
						"acc_norm_stderr,none": 0.009940334245876207,
						"acc_stderr,none": 0.01006226814077261,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2677732707087959,
						"acc_norm,none": 0.2741246797608881,
						"acc_norm_stderr,none": 0.003258954036112774,
						"acc_stderr,none": 0.003235039234263565,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618556,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7178123074613596,
						"acc_norm,none": 0.5440739814268382,
						"acc_norm_stderr,none": 0.004232947643798331,
						"acc_stderr,none": 0.15395298084443274,
						"alias": "pythia",
						"bits_per_byte,none": 0.6441185953327017,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.562784222210745,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.109855514494685,
						"perplexity_stderr,none": 0.0884849781693152,
						"word_perplexity,none": 10.88592724737089,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41134751773049644,
						"acc_norm,none": 0.450354609929078,
						"acc_norm_stderr,none": 0.04844321154297932,
						"acc_stderr,none": 0.03671293143688081,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.43333333333333335,
						"acc_norm,none": 0.5333333333333333,
						"acc_norm_stderr,none": 0.0457329560380023,
						"acc_stderr,none": 0.04542567625794981,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03965257928590721,
						"acc_stderr,none": 0.039125538756915115,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.3873239436619718,
						"acc_norm_stderr,none": 0.028957389575950957,
						"acc_stderr,none": 0.029095492917064907,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5101592531576057,
						"acc_stderr,none": 0.006764013885818256,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4801632451150136,
						"acc_stderr,none": 0.002484742843302269,
						"alias": "qqp",
						"f1,none": 0.3858449490079191,
						"f1_stderr,none": 0.0033410717787748356
					},
					"race": {
						"acc,none": 0.3722488038277512,
						"acc_stderr,none": 0.01496098476089933,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795997,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.914,
						"acc_norm,none": 0.875,
						"acc_norm_stderr,none": 0.010463483381956722,
						"acc_stderr,none": 0.008870325962594766,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795997,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5149082568807339,
						"acc_stderr,none": 0.0169343211533256,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5472358292512246,
						"acc_norm,none": 0.7439768069579126,
						"acc_norm_stderr,none": 0.003085674243491662,
						"acc_stderr,none": 0.003519281529819246,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5660710126118931,
						"acc_stderr,none": 0.03652281469368929,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5539863782051282,
						"acc_stderr,none": 0.004974999814693153,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6595723117462248,
						"acc_stderr,none": 0.00477060094958968,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.48745098039215684,
						"acc_stderr,none": 0.0049494208303815165,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.30808319703653325,
						"acc_stderr,none": 0.05076102004868316,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.00025729292329675844,
						"bleu_diff,none": -6.667858182648117,
						"bleu_diff_stderr,none": 0.5945693598660794,
						"bleu_max,none": 22.668450148573267,
						"bleu_max_stderr,none": 0.5419271742300632,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.00023475821985345906,
						"rouge1_diff,none": -10.019734436908287,
						"rouge1_diff_stderr,none": 0.8410534379202882,
						"rouge1_max,none": 45.737723487827154,
						"rouge1_max_stderr,none": 0.8122598704941005,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.00019929461127346601,
						"rouge2_diff,none": -10.871699444365984,
						"rouge2_diff_stderr,none": 1.0803246765266343,
						"rouge2_max,none": 29.424974644198702,
						"rouge2_max_stderr,none": 0.9969156641384315,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.000228839050560548,
						"rougeL_diff,none": -10.186938529736118,
						"rougeL_diff_stderr,none": 0.8605854577587904,
						"rougeL_max,none": 43.12716790628842,
						"rougeL_max_stderr,none": 0.8138546469257434
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.01604035296671362,
						"bleu_diff,none": -6.667858182648117,
						"bleu_diff_stderr,none": 0.7710832379620759,
						"bleu_max,none": 22.668450148573267,
						"bleu_max_stderr,none": 0.7361570309587916,
						"rouge1_acc,none": 0.2582619339045288,
						"rouge1_acc_stderr,none": 0.01532182168847618,
						"rouge1_diff,none": -10.019734436908287,
						"rouge1_diff_stderr,none": 0.9170896564242169,
						"rouge1_max,none": 45.737723487827154,
						"rouge1_max_stderr,none": 0.901254609138894,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.014117174337432616,
						"rouge2_diff,none": -10.871699444365984,
						"rouge2_diff_stderr,none": 1.0393866828695826,
						"rouge2_max,none": 29.424974644198702,
						"rouge2_max_stderr,none": 0.9984566410908545,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.015127427096520677,
						"rougeL_diff,none": -10.186938529736118,
						"rougeL_diff_stderr,none": 0.9276774535143076,
						"rougeL_max,none": 43.12716790628842,
						"rougeL_max_stderr,none": 0.9021389288384264
					},
					"truthfulqa_mc1": {
						"acc,none": 0.204406364749082,
						"acc_stderr,none": 0.014117174337432621,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3599216131802589,
						"acc_stderr,none": 0.013456167431999873,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6441185953327017,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.562784222210745,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.88592724737089,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6416732438831886,
						"acc_stderr,none": 0.013476581172567552,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.059755502635482904,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8278388278388278,
						"acc_stderr,none": 0.02289054060353955,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5441818181818182,
						"acc_stderr,none": 0.03440380430176429,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.022017482578127676,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353196,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.02230396677426995,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.022000910893877186,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4138688085676038,
						"acc_stderr,none": 0.05358123052295437,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516878,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4004016064257028,
						"acc_stderr,none": 0.00982122560976308,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.010010427753210671,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.00955154205330182,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5570281124497992,
						"acc_stderr,none": 0.009956671790008798,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4919678714859438,
						"acc_stderr,none": 0.010020779633955255,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810773,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42128514056224897,
						"acc_stderr,none": 0.009897099560589198,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.36947791164658633,
						"acc_stderr,none": 0.00967457608577645,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38353413654618473,
						"acc_stderr,none": 0.009746396613443776,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467441,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138157,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099373,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3610441767068273,
						"acc_stderr,none": 0.009627269742195714,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5490042717044703,
						"acc_stderr,none": 0.057401991595252236,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5155526141628061,
						"acc_stderr,none": 0.012860899111470788,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7193911317008603,
						"acc_stderr,none": 0.011562314078147744,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6082064857710126,
						"acc_stderr,none": 0.012562199063960652,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5208471211118465,
						"acc_stderr,none": 0.01285593628288127,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5188616810059563,
						"acc_stderr,none": 0.012857966762464992,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5373924553275976,
						"acc_stderr,none": 0.012831093347016553,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48510919920582396,
						"acc_stderr,none": 0.012861417842074006,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880638,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.49040370615486434,
						"acc_stderr,none": 0.01286475526040896,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245277,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.542686962276638,
						"acc_stderr,none": 0.01282014720425624,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7424140256237357,
						"acc_stderr,none": 0.06344495078776032,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8412903225806452,
						"acc_stderr,none": 0.0075797798014554316,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6265060240963856,
						"acc_stderr,none": 0.05341921480681957,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6100104275286757,
						"acc_stderr,none": 0.01575840915568299,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6806083650190115,
						"acc_stderr,none": 0.02880446175714679,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6412698412698413,
						"acc_stderr,none": 0.02706696296058237,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6527777777777778,
						"acc_stderr,none": 0.021227675707409237,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "EleutherAI/gpt-j-6b"
	},
	"EleutherAI/pythia-1.4b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.4977452085682074,
						"acc_norm,none": 0.4551860202931229,
						"acc_norm_stderr,none": 0.04088593831190187,
						"acc_stderr,none": 0.05581052406582443,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3315625,
						"acc_stderr,none": 0.014496320837187384,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0083,
						"acc_stderr,none": 0.008454940260116464,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8290149253731344,
						"acc_stderr,none": 0.15572318305401575,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2303120356612184,
						"acc_norm,none": 0.2303120356612184,
						"acc_norm_stderr,none": 0.10749642905425433,
						"acc_stderr,none": 0.10749642905425433,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2526333966499741,
						"acc_norm,none": 0.2526333966499741,
						"acc_norm_stderr,none": 0.034860243561276864,
						"acc_stderr,none": 0.034860243561276864,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.48395572450805,
						"likelihood_diff_stderr,none": 0.4301616381954968,
						"pct_stereotype,none": 0.5213178294573644,
						"pct_stereotype_stderr,none": 0.09022414499121097
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.02066929133858268,
						"exact_match_stderr,none": 0.003156984997714907
					},
					"glue": {
						"acc,none": 0.47058117119112364,
						"acc_stderr,none": 0.07765693734504696,
						"alias": "glue",
						"f1,none": 0.28147253900807423,
						"f1_stderr,none": 0.0025160555797614894,
						"mcc,none": -0.017469438128079558,
						"mcc_stderr,none": 0.0007894138055410764
					},
					"kmmlu": {
						"acc,none": 0.09737222061796129,
						"acc_norm,none": 0.09737222061796129,
						"acc_norm_stderr,none": 0.06549162015621265,
						"acc_stderr,none": 0.06549162015621265,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4834466125849594,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.038183260446558405,
						"alias": "kobest",
						"f1,none": 0.3832191200056395,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.549776829031632,
						"acc_stderr,none": 0.029819066874259365,
						"alias": "lambada",
						"perplexity,none": 8.725577906021975,
						"perplexity_stderr,none": 1.2768779328981876
					},
					"lambada_cloze": {
						"acc,none": 0.017756646613623132,
						"acc_stderr,none": 0.005051374643815774,
						"alias": "lambada_cloze",
						"perplexity,none": 1186.5405557654572,
						"perplexity_stderr,none": 50.335713937668615
					},
					"lambada_multilingual": {
						"acc,none": 0.3551717446147875,
						"acc_stderr,none": 0.07534247534991442,
						"alias": "lambada_multilingual",
						"perplexity,none": 115.92261183841723,
						"perplexity_stderr,none": 35.076671078991765
					},
					"mmlu": {
						"acc,none": 0.24262925509186725,
						"acc_stderr,none": 0.0405155451161366,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24675876726886292,
						"acc_stderr,none": 0.030621180149569334,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2552301255230126,
						"acc_stderr,none": 0.04807863077419059,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2317192070198245,
						"acc_stderr,none": 0.0369686942637409,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.23469711385981604,
						"acc_stderr,none": 0.046141324733880607,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3156848828956707,
						"acc_norm,none": 0.29894074319008473,
						"acc_norm_stderr,none": 0.00015621793645494446,
						"acc_stderr,none": 0.07865320221968022,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.5087857142857144,
						"acc_stderr,none": 0.03126781410665379,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7072131190731042,
						"acc_norm,none": 0.4628928749628576,
						"acc_norm_stderr,none": 0.004292906614214132,
						"acc_stderr,none": 0.15309813568529757,
						"alias": "pythia",
						"bits_per_byte,none": 0.7271127300547762,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6553229715896627,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.216575252642833,
						"perplexity_stderr,none": 0.15922862256504106,
						"word_perplexity,none": 14.806917563131952,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32092198581560283,
						"acc_norm,none": 0.38652482269503546,
						"acc_norm_stderr,none": 0.04933381181678352,
						"acc_stderr,none": 0.03772915939734339,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5462047851984959,
						"acc_stderr,none": 0.032159662679077924,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.33800571184289424,
						"acc_stderr,none": 0.05176518030122277,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.00025848630420259307,
						"bleu_diff,none": -5.36844737468472,
						"bleu_diff_stderr,none": 0.5175770416812936,
						"bleu_max,none": 22.216469952642846,
						"bleu_max_stderr,none": 0.4900958976085792,
						"rouge1_acc,none": 0.2692778457772338,
						"rouge1_acc_stderr,none": 0.00024113638180246015,
						"rouge1_diff,none": -8.079096928330063,
						"rouge1_diff_stderr,none": 0.7776012200410812,
						"rouge1_max,none": 46.2610076317151,
						"rouge1_max_stderr,none": 0.7657034695451194,
						"rouge2_acc,none": 0.21297429620563035,
						"rouge2_acc_stderr,none": 0.00020541206539380808,
						"rouge2_diff,none": -9.522649753405604,
						"rouge2_diff_stderr,none": 0.9673815632914815,
						"rouge2_max,none": 29.183306916690952,
						"rouge2_max_stderr,none": 0.9443906575723012,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.00023403117548621342,
						"rougeL_diff,none": -8.216777978832019,
						"rougeL_diff_stderr,none": 0.7636650717651497,
						"rougeL_max,none": 43.54923154149236,
						"rougeL_max_stderr,none": 0.7660686840940987
					},
					"xcopa": {
						"acc,none": 0.5265454545454545,
						"acc_stderr,none": 0.027440572119689203,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.38934404283801877,
						"acc_stderr,none": 0.044348257927515206,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5178990433788581,
						"acc_stderr,none": 0.045133185798356706,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6826253090582153,
						"acc_stderr,none": 0.05882248701216535,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.4977452085682074,
						"acc_norm,none": 0.4551860202931229,
						"acc_norm_stderr,none": 0.04088593831190187,
						"acc_stderr,none": 0.05581052406582443,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3315625,
						"acc_stderr,none": 0.014496320837187384,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.01493311749093257,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.329,
						"acc_stderr,none": 0.014865395385928367,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3308333333333333,
						"acc_stderr,none": 0.013588208070709007,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2619453924914676,
						"acc_norm,none": 0.2858361774744027,
						"acc_norm_stderr,none": 0.013203196088537369,
						"acc_stderr,none": 0.012849054826858114,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6140572390572391,
						"acc_norm,none": 0.5387205387205387,
						"acc_norm_stderr,none": 0.010228972678389599,
						"acc_stderr,none": 0.009989277329503951,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0083,
						"acc_stderr,none": 0.008454940260116464,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0295,
						"acc_stderr,none": 0.0037844465933619237,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0165,
						"acc_stderr,none": 0.0028491988289663403,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.021,
						"acc_stderr,none": 0.0032069677767574654,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0125,
						"acc_stderr,none": 0.00248494717876267,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521528,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521539,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0013015184381778742,
						"acc_stderr,none": 0.0007511058074590335,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8290149253731344,
						"acc_stderr,none": 0.15572318305401575,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024973,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000048,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844881,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042095,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074792,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220053,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.01518652793204012,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175118,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747391,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689101,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706824,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380716,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639233,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118578,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698465,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812189,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543147,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177907,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713327,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689071,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.281,
						"acc_stderr,none": 0.01422115470843493,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695803,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.01297583802196877,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.014806864733738863,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474913,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491123,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571415,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697593,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.735,
						"acc_stderr,none": 0.013963164754809954,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515434,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.015778243024904586,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.01345407046257794,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732974,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.0071508835212954446,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.015070604603768408,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248137,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340995,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816338,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.0070246242138171456,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469401,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.00989939381972443,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499328,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.0150806639915631,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.0053091606857569975,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767593,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498323,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343966,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122365,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717593,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.010570133761108652,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890115,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890141,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426097,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406723,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318227,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140922,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.0157926694516289,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.352,
						"acc_stderr,none": 0.015110404505648671,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6287461773700306,
						"acc_stderr,none": 0.008450174658715908,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.23214285714285715,
						"acc_stderr,none": 0.05692939024000109,
						"alias": "cb",
						"f1,none": 0.19973164654015718,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2303120356612184,
						"acc_norm,none": 0.2303120356612184,
						"acc_norm_stderr,none": 0.10749642905425433,
						"acc_stderr,none": 0.10749642905425433,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2526333966499741,
						"acc_norm,none": 0.2526333966499741,
						"acc_norm_stderr,none": 0.034860243561276864,
						"acc_stderr,none": 0.034860243561276864,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.029975990636702532,
						"acc_stderr,none": 0.029975990636702532,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932032,
						"acc_stderr,none": 0.030532892233932032,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998164,
						"acc_stderr,none": 0.028942004040998164,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.038522733649243156,
						"acc_stderr,none": 0.038522733649243156,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467189,
						"acc_stderr,none": 0.02985642316467189,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.017469438128079558,
						"mcc_stderr,none": 0.028096508778513326
					},
					"copa": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.48395572450805,
						"likelihood_diff_stderr,none": 0.4301616381954968,
						"pct_stereotype,none": 0.5213178294573644,
						"pct_stereotype_stderr,none": 0.09022414499121097
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.3724657125819917,
						"likelihood_diff_stderr,none": 0.08576055769938315,
						"pct_stereotype,none": 0.5855694692903995,
						"pct_stereotype_stderr,none": 0.012033115254329001
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.5549450549450547,
						"likelihood_diff_stderr,none": 0.37455309548043714,
						"pct_stereotype,none": 0.6373626373626373,
						"pct_stereotype_stderr,none": 0.050676699210318685
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.045454545454546,
						"likelihood_diff_stderr,none": 2.2808038310434506,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.8538461538461535,
						"likelihood_diff_stderr,none": 0.6102561998356674,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.566015625,
						"likelihood_diff_stderr,none": 0.1733199922094096,
						"pct_stereotype,none": 0.58125,
						"pct_stereotype_stderr,none": 0.027622536202702153
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.236111111111111,
						"likelihood_diff_stderr,none": 0.22119027188528595,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.03388857118502325
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.703125,
						"likelihood_diff_stderr,none": 0.3800073026321929,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.05533504751887218
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.1970964566929134,
						"likelihood_diff_stderr,none": 0.14582868917342046,
						"pct_stereotype,none": 0.4763779527559055,
						"pct_stereotype_stderr,none": 0.022180984040966984
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.105855855855856,
						"likelihood_diff_stderr,none": 0.29450849456979494,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.04277662524881439
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.356182795698925,
						"likelihood_diff_stderr,none": 0.48563300866860065,
						"pct_stereotype,none": 0.8387096774193549,
						"pct_stereotype_stderr,none": 0.03834564688497145
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.8039473684210527,
						"likelihood_diff_stderr,none": 0.2325134503209737,
						"pct_stereotype,none": 0.6368421052631579,
						"pct_stereotype_stderr,none": 0.03498104083833202
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5965638044126416,
						"likelihood_diff_stderr,none": 0.08860853583478216,
						"pct_stereotype,none": 0.4537865235539654,
						"pct_stereotype_stderr,none": 0.012161019796992528
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.5861111111111112,
						"likelihood_diff_stderr,none": 0.3643603220792967,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.0576923076923075,
						"likelihood_diff_stderr,none": 1.15405980927875,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.454545454545454,
						"likelihood_diff_stderr,none": 0.47599451436664825,
						"pct_stereotype,none": 0.5909090909090909,
						"pct_stereotype_stderr,none": 0.06098367211363066
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.338006230529595,
						"likelihood_diff_stderr,none": 0.1943489216558413,
						"pct_stereotype,none": 0.46417445482866043,
						"pct_stereotype_stderr,none": 0.02787900925837708
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.205039525691699,
						"likelihood_diff_stderr,none": 0.23162134612986204,
						"pct_stereotype,none": 0.31620553359683795,
						"pct_stereotype_stderr,none": 0.029291880485542002
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.8194444444444446,
						"likelihood_diff_stderr,none": 0.5119661190713992,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.05820650942569532
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.279891304347826,
						"likelihood_diff_stderr,none": 0.17036812599333676,
						"pct_stereotype,none": 0.31956521739130433,
						"pct_stereotype_stderr,none": 0.02176540043885054
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4630434782608694,
						"likelihood_diff_stderr,none": 0.3202164164713817,
						"pct_stereotype,none": 0.6695652173913044,
						"pct_stereotype_stderr,none": 0.04405415696687147
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.4835164835164836,
						"likelihood_diff_stderr,none": 0.3301080732355634,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7675382653061225,
						"likelihood_diff_stderr,none": 0.26320936489070784,
						"pct_stereotype,none": 0.6020408163265306,
						"pct_stereotype_stderr,none": 0.0350521715047299
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.02066929133858268,
						"exact_match_stderr,none": 0.003156984997714907
					},
					"glue": {
						"acc,none": 0.47058117119112364,
						"acc_stderr,none": 0.07765693734504696,
						"alias": "glue",
						"f1,none": 0.28147253900807423,
						"f1_stderr,none": 0.0025160555797614894,
						"mcc,none": -0.017469438128079558,
						"mcc_stderr,none": 0.0007894138055410764
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.01288855193328279,
						"exact_match_stderr,get-answer": 0.0031069012664996618
					},
					"hellaswag": {
						"acc,none": 0.4032065325632344,
						"acc_norm,none": 0.519717187811193,
						"acc_norm_stderr,none": 0.004985900172317697,
						"acc_stderr,none": 0.0048953903414456264,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09737222061796129,
						"acc_norm,none": 0.09737222061796129,
						"acc_norm_stderr,none": 0.06549162015621265,
						"acc_stderr,none": 0.06549162015621265,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.009008893392651526,
						"acc_stderr,none": 0.009008893392651526,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333378,
						"acc_stderr,none": 0.008333333333333378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264368,
						"acc_stderr,none": 0.012510816141264368,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968152,
						"acc_stderr,none": 0.012583693787968152,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.01552503498177411,
						"acc_stderr,none": 0.01552503498177411,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.007,
						"acc_norm,none": 0.007,
						"acc_norm_stderr,none": 0.0026377941462437785,
						"acc_stderr,none": 0.0026377941462437785,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.002,
						"acc_norm,none": 0.002,
						"acc_norm_stderr,none": 0.0014135055705578176,
						"acc_stderr,none": 0.0014135055705578176,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910625,
						"acc_stderr,none": 0.004319451082910625,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452374,
						"acc_stderr,none": 0.005651808820452374,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910613,
						"acc_stderr,none": 0.004319451082910613,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.016,
						"acc_norm,none": 0.016,
						"acc_norm_stderr,none": 0.003969856390319416,
						"acc_stderr,none": 0.003969856390319416,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910603,
						"acc_stderr,none": 0.004319451082910603,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.111,
						"acc_norm,none": 0.111,
						"acc_norm_stderr,none": 0.009938701010583726,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333452,
						"acc_stderr,none": 0.008534156773333452,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323485,
						"acc_stderr,none": 0.008072494358323485,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.0042063872496114615,
						"acc_stderr,none": 0.0042063872496114615,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.028,
						"acc_norm,none": 0.028,
						"acc_norm_stderr,none": 0.005219506034410047,
						"acc_stderr,none": 0.005219506034410047,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.069,
						"acc_norm,none": 0.069,
						"acc_norm_stderr,none": 0.008018934050315157,
						"acc_stderr,none": 0.008018934050315157,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412808,
						"acc_stderr,none": 0.012310790208412808,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14166666666666666,
						"acc_norm,none": 0.14166666666666666,
						"acc_norm_stderr,none": 0.014247819867919655,
						"acc_stderr,none": 0.014247819867919655,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024952,
						"acc_stderr,none": 0.009698921026024952,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557422,
						"acc_stderr,none": 0.007572076091557422,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.077,
						"acc_norm,none": 0.077,
						"acc_norm_stderr,none": 0.00843458014024064,
						"acc_stderr,none": 0.00843458014024064,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438678,
						"acc_stderr,none": 0.013434451402438678,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426114,
						"acc_stderr,none": 0.006125072776426114,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416054,
						"acc_stderr,none": 0.010811655372416054,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306523,
						"acc_stderr,none": 0.004536472151306523,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4834466125849594,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.038183260446558405,
						"alias": "kobest",
						"f1,none": 0.3832191200056395,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33681169605660166,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.01581727492920901,
						"alias": " - kobest_copa",
						"f1,none": 0.4912653872191445,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.354,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.022279694107843417,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3494156587291876,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5440806045340051,
						"acc_stderr,none": 0.02502811047400061,
						"alias": " - kobest_sentineg",
						"f1,none": 0.493011507552934,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.549776829031632,
						"acc_stderr,none": 0.029819066874259365,
						"alias": "lambada",
						"perplexity,none": 8.725577906021975,
						"perplexity_stderr,none": 1.2768779328981876
					},
					"lambada_cloze": {
						"acc,none": 0.017756646613623132,
						"acc_stderr,none": 0.005051374643815774,
						"alias": "lambada_cloze",
						"perplexity,none": 1186.5405557654572,
						"perplexity_stderr,none": 50.335713937668615
					},
					"lambada_multilingual": {
						"acc,none": 0.3551717446147875,
						"acc_stderr,none": 0.07534247534991442,
						"alias": "lambada_multilingual",
						"perplexity,none": 115.92261183841723,
						"perplexity_stderr,none": 35.076671078991765
					},
					"lambada_openai": {
						"acc,none": 0.6097418979235397,
						"acc_stderr,none": 0.006796120271549717,
						"alias": " - lambada_openai",
						"perplexity,none": 6.216575252642833,
						"perplexity_stderr,none": 0.15922862256504106
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.00834465359984475,
						"acc_stderr,none": 0.0012673501139635055,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 1217.0705265995605,
						"perplexity_stderr,none": 52.44869607362672
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2542208422278284,
						"acc_stderr,none": 0.006066284446719121,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 155.7215999986184,
						"perplexity_stderr,none": 9.51275811645309
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6111003299049098,
						"acc_stderr,none": 0.006791834884450134,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.219317941684381,
						"perplexity_stderr,none": 0.1592866227268097
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2841063458179701,
						"acc_stderr,none": 0.006283140862669239,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 156.60718511302414,
						"perplexity_stderr,none": 9.138188761472469
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.33145740345429847,
						"acc_stderr,none": 0.006558287884402315,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 101.36439160211859,
						"perplexity_stderr,none": 6.030450417860299
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.2949738016689307,
						"acc_stderr,none": 0.006353403285409045,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 159.70056453664057,
						"perplexity_stderr,none": 10.01455028302162
					},
					"lambada_standard": {
						"acc,none": 0.4917523772559674,
						"acc_stderr,none": 0.006965029895407403,
						"alias": " - lambada_standard",
						"perplexity,none": 11.224926147276436,
						"perplexity_stderr,none": 0.33420400506932724
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.027168639627401514,
						"acc_stderr,none": 0.0022649822374032815,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1156.0105849313536,
						"perplexity_stderr,none": 43.02167179876815
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23346055979643765,
						"exact_match_stderr,get-answer": 0.010672985547946033
					},
					"logiqa": {
						"acc,none": 0.20583717357910905,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.017602909186822453,
						"acc_stderr,none": 0.015858423219323885,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23155216284987276,
						"acc_norm,none": 0.27989821882951654,
						"acc_norm_stderr,none": 0.011326843954481584,
						"acc_stderr,none": 0.010642496713710918,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25192629815745393,
						"acc_norm,none": 0.2425460636515913,
						"acc_norm_stderr,none": 0.00784649711506857,
						"acc_stderr,none": 0.007947115720531419,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5429993645414107,
						"acc_stderr,none": 0.005126831733896751,
						"alias": "mc_taco",
						"f1,none": 0.4631081249222347,
						"f1_stderr,none": 0.0068929911801440865
					},
					"medmcqa": {
						"acc,none": 0.3162801816877839,
						"acc_norm,none": 0.3162801816877839,
						"acc_norm_stderr,none": 0.007190896863029239,
						"acc_stderr,none": 0.007190896863029239,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27651217596229377,
						"acc_norm,none": 0.27651217596229377,
						"acc_norm_stderr,none": 0.012540913938428874,
						"acc_stderr,none": 0.012540913938428874,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24262925509186725,
						"acc_stderr,none": 0.0405155451161366,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.03673731683969506,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.17763157894736842,
						"acc_stderr,none": 0.03110318238312338,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.29056603773584905,
						"acc_stderr,none": 0.02794321998933713,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.16184971098265896,
						"acc_stderr,none": 0.028083594279575765,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.28936170212765955,
						"acc_stderr,none": 0.02964400657700962,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.18421052631578946,
						"acc_stderr,none": 0.03646758875075566,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309994,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.0220190800122179,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604674,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.022755204959542936,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.16748768472906403,
						"acc_stderr,none": 0.02627308604753542,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23030303030303031,
						"acc_stderr,none": 0.032876667586034886,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.17676767676767677,
						"acc_stderr,none": 0.027178752639044915,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.19689119170984457,
						"acc_stderr,none": 0.028697873971860674,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23076923076923078,
						"acc_stderr,none": 0.021362027725222717,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712173,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.0275536144678638,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24587155963302754,
						"acc_stderr,none": 0.018461940968708443,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18981481481481483,
						"acc_stderr,none": 0.026744714834691943,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2696078431372549,
						"acc_stderr,none": 0.031145570659486782,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.25738396624472576,
						"acc_stderr,none": 0.028458820991460295,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3183856502242152,
						"acc_stderr,none": 0.03126580522513713,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22137404580152673,
						"acc_stderr,none": 0.036412970813137296,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24675876726886292,
						"acc_stderr,none": 0.030621180149569334,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374984,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.04157751539865629,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.1262135922330097,
						"acc_stderr,none": 0.03288180278808628,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.29914529914529914,
						"acc_stderr,none": 0.02999695185834948,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2388250319284802,
						"acc_stderr,none": 0.0152468031973987,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.023445826276545543,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961455,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.024954184324879905,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2552301255230126,
						"acc_stderr,none": 0.04807863077419059,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.18971061093247588,
						"acc_stderr,none": 0.02226819625878321,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.23765432098765432,
						"acc_stderr,none": 0.023683591837008557,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2438070404172099,
						"acc_stderr,none": 0.010966507972178475,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.024562204314142317,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.017555818091322277,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.04350271442923243,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.025607375986579153,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2317192070198245,
						"acc_stderr,none": 0.0369686942637409,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.21393034825870647,
						"acc_stderr,none": 0.028996909693328916,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.23469711385981604,
						"acc_stderr,none": 0.046141324733880607,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2710843373493976,
						"acc_stderr,none": 0.03460579907553027,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.03565079670708312,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3268466632705043,
						"acc_stderr,none": 0.004734847572465972,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3263832384052075,
						"acc_stderr,none": 0.004729024000627127,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6568627450980392,
						"acc_stderr,none": 0.02353282402069415,
						"alias": "mrpc",
						"f1,none": 0.7859327217125383,
						"f1_stderr,none": 0.01763106339344774
					},
					"multimedqa": {
						"acc,none": 0.3156848828956707,
						"acc_norm,none": 0.29894074319008473,
						"acc_norm_stderr,none": 0.00015621793645494446,
						"acc_stderr,none": 0.07865320221968022,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5664191419141914,
						"acc_stderr,none": 0.007118155993424795,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6485139222702108,
						"mrr_stderr,none": 0.010314667633173057,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4322799097065463,
						"r@2_stderr,none": 0.016652445549879158
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6281978951821747,
						"mrr_stderr,none": 0.010413467796397005,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.45936794582392776,
						"r@2_stderr,none": 0.01675172766782549
					},
					"openbookqa": {
						"acc,none": 0.214,
						"acc_norm,none": 0.334,
						"acc_norm_stderr,none": 0.021113492347743734,
						"acc_stderr,none": 0.018359797502387,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4795,
						"acc_stderr,none": 0.011173732641806811,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.011112774040420284,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4725,
						"acc_stderr,none": 0.01116620871686354,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.011141704034140798,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.011168006186472585,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5087857142857144,
						"acc_stderr,none": 0.03126781410665379,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7078346028291621,
						"acc_norm,none": 0.7083786724700761,
						"acc_norm_stderr,none": 0.010604441527428787,
						"acc_stderr,none": 0.01061025217451366,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.21045046968403075,
						"acc_norm,none": 0.2933390264730999,
						"acc_norm_stderr,none": 0.003326316944506626,
						"acc_stderr,none": 0.0029780898108479528,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.021893529941665813,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7072131190731042,
						"acc_norm,none": 0.4628928749628576,
						"acc_norm_stderr,none": 0.004292906614214132,
						"acc_stderr,none": 0.15309813568529757,
						"alias": "pythia",
						"bits_per_byte,none": 0.7271127300547762,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6553229715896627,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.216575252642833,
						"perplexity_stderr,none": 0.15922862256504106,
						"word_perplexity,none": 14.806917563131952,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32092198581560283,
						"acc_norm,none": 0.38652482269503546,
						"acc_norm_stderr,none": 0.04933381181678352,
						"acc_stderr,none": 0.03772915939734339,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.375,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.04583492485141056,
						"acc_stderr,none": 0.04437947515604539,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.038851434494290536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.30985915492957744,
						"acc_norm,none": 0.33098591549295775,
						"acc_norm_stderr,none": 0.02797236390054683,
						"acc_stderr,none": 0.027488928644214792,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5000915248032217,
						"acc_stderr,none": 0.00676541043843172,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5291367796190948,
						"acc_stderr,none": 0.002482474862583732,
						"alias": "qqp",
						"f1,none": 0.27602205742536606,
						"f1_stderr,none": 0.0036339413851001422
					},
					"race": {
						"acc,none": 0.3444976076555024,
						"acc_stderr,none": 0.014707199932728215,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5306859205776173,
						"acc_stderr,none": 0.03003973059219781,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.865,
						"acc_norm,none": 0.794,
						"acc_norm_stderr,none": 0.012795613612786534,
						"acc_stderr,none": 0.010811655372416053,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5306859205776173,
						"acc_stderr,none": 0.03003973059219781,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.016403879298128067,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5026991902429271,
						"acc_norm,none": 0.6889433170048985,
						"acc_norm_stderr,none": 0.003272971595078723,
						"acc_stderr,none": 0.003535040535935045,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5462047851984959,
						"acc_stderr,none": 0.032159662679077924,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5085136217948718,
						"acc_stderr,none": 0.005003529939407276,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6294719772980643,
						"acc_stderr,none": 0.0048621480010505366,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5025490196078432,
						"acc_stderr,none": 0.004950916077531901,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.33800571184289424,
						"acc_stderr,none": 0.05176518030122277,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.00025848630420259307,
						"bleu_diff,none": -5.36844737468472,
						"bleu_diff_stderr,none": 0.5175770416812936,
						"bleu_max,none": 22.216469952642846,
						"bleu_max_stderr,none": 0.4900958976085792,
						"rouge1_acc,none": 0.2692778457772338,
						"rouge1_acc_stderr,none": 0.00024113638180246015,
						"rouge1_diff,none": -8.079096928330063,
						"rouge1_diff_stderr,none": 0.7776012200410812,
						"rouge1_max,none": 46.2610076317151,
						"rouge1_max_stderr,none": 0.7657034695451194,
						"rouge2_acc,none": 0.21297429620563035,
						"rouge2_acc_stderr,none": 0.00020541206539380808,
						"rouge2_diff,none": -9.522649753405604,
						"rouge2_diff_stderr,none": 0.9673815632914815,
						"rouge2_max,none": 29.183306916690952,
						"rouge2_max_stderr,none": 0.9443906575723012,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.00023403117548621342,
						"rougeL_diff,none": -8.216777978832019,
						"rougeL_diff_stderr,none": 0.7636650717651497,
						"rougeL_max,none": 43.54923154149236,
						"rougeL_max_stderr,none": 0.7660686840940987
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3023255813953488,
						"bleu_acc_stderr,none": 0.01607750926613302,
						"bleu_diff,none": -5.36844737468472,
						"bleu_diff_stderr,none": 0.7194282741742178,
						"bleu_max,none": 22.216469952642846,
						"bleu_max_stderr,none": 0.7000684949407302,
						"rouge1_acc,none": 0.2692778457772338,
						"rouge1_acc_stderr,none": 0.01552856663708728,
						"rouge1_diff,none": -8.079096928330063,
						"rouge1_diff_stderr,none": 0.881816999179014,
						"rouge1_max,none": 46.2610076317151,
						"rouge1_max_stderr,none": 0.8750448385912115,
						"rouge2_acc,none": 0.21297429620563035,
						"rouge2_acc_stderr,none": 0.014332203787059688,
						"rouge2_diff,none": -9.522649753405604,
						"rouge2_diff_stderr,none": 0.9835555720402795,
						"rouge2_max,none": 29.183306916690952,
						"rouge2_max_stderr,none": 0.9717976422961219,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485086,
						"rougeL_diff,none": -8.216777978832019,
						"rougeL_diff_stderr,none": 0.8738793233422735,
						"rougeL_max,none": 43.54923154149236,
						"rougeL_max_stderr,none": 0.875253497047626
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23255813953488372,
						"acc_stderr,none": 0.014789157531080527,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3907294979968995,
						"acc_stderr,none": 0.014223647853682376,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.02066929133858268,
						"exact_match_stderr,none": 0.003156984997714907
					},
					"wic": {
						"acc,none": 0.48589341692789967,
						"acc_stderr,none": 0.019802835228005834,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7271127300547762,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6553229715896627,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.806917563131952,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5730071033938438,
						"acc_stderr,none": 0.013901878072575058,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.05975550263548289,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7216117216117216,
						"acc_stderr,none": 0.02717645531875414,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5265454545454545,
						"acc_stderr,none": 0.027440572119689203,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920788,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668086,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962126,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385256,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044292,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.38934404283801877,
						"acc_stderr,none": 0.044348257927515206,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.00944319336590334,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.00954898064915339,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42289156626506025,
						"acc_stderr,none": 0.009902179034797443,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38353413654618473,
						"acc_stderr,none": 0.009746396613443769,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5196787148594377,
						"acc_stderr,none": 0.010014307727112695,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.00997304277481168,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.45903614457831327,
						"acc_stderr,none": 0.009988381409296447,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3610441767068273,
						"acc_stderr,none": 0.009627269742195715,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.39879518072289155,
						"acc_stderr,none": 0.009814625416137578,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.009593947957927137,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3755020080321285,
						"acc_stderr,none": 0.009706422844379826,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335277,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.00954898064915339,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3827309236947791,
						"acc_stderr,none": 0.009742526340884072,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3461847389558233,
						"acc_stderr,none": 0.009536061379898332,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5178990433788581,
						"acc_stderr,none": 0.045133185798356706,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.012847698270388222,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6591661151555261,
						"acc_stderr,none": 0.01219776735043312,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5360688285903376,
						"acc_stderr,none": 0.012833602406620018,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5089344804765056,
						"acc_stderr,none": 0.0128650709173208,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5056254136333554,
						"acc_stderr,none": 0.012866310923072506,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.48510919920582396,
						"acc_stderr,none": 0.012861417842074006,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4983454665784249,
						"acc_stderr,none": 0.012867054869163346,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4990072799470549,
						"acc_stderr,none": 0.012867099955422942,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.012866108021218212,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5334215751158173,
						"acc_stderr,none": 0.01283834793473167,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5049636002647253,
						"acc_stderr,none": 0.012866491277589948,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6826253090582153,
						"acc_stderr,none": 0.05882248701216535,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.7763440860215054,
						"acc_stderr,none": 0.008643691453616828,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5542168674698795,
						"acc_stderr,none": 0.05489019318889363,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5641293013555787,
						"acc_stderr,none": 0.01602084474339302,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6197718631178707,
						"acc_stderr,none": 0.02999075562437352,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5619047619047619,
						"acc_stderr,none": 0.02799953368887838,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6051587301587301,
						"acc_stderr,none": 0.021795253713508076,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "EleutherAI/pythia-1.4b"
	},
	"EleutherAI/pythia-2.8b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5228297632468997,
						"acc_norm,none": 0.5008455467869222,
						"acc_norm_stderr,none": 0.04310600786168749,
						"acc_stderr,none": 0.05512890966366779,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.015164819103789548,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.01645,
						"acc_stderr,none": 0.016084819586951658,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.840179104477612,
						"acc_stderr,none": 0.14010053936660483,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10798817715416889,
						"acc_stderr,none": 0.10798817715416889,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25487825936798475,
						"acc_norm,none": 0.25487825936798475,
						"acc_norm_stderr,none": 0.03540150683357085,
						"acc_stderr,none": 0.03540150683357085,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4993943798449614,
						"likelihood_diff_stderr,none": 0.4046976427894626,
						"pct_stereotype,none": 0.5566487775790101,
						"pct_stereotype_stderr,none": 0.08039935406617489
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170044
					},
					"glue": {
						"acc,none": 0.47311529918224926,
						"acc_stderr,none": 0.06487116785467484,
						"alias": "glue",
						"f1,none": 0.34251313536190403,
						"f1_stderr,none": 0.001940375564537564,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.0001683322344400318
					},
					"kmmlu": {
						"acc,none": 0.10210799884493214,
						"acc_norm,none": 0.10210799884493214,
						"acc_norm_stderr,none": 0.06307902816412339,
						"acc_stderr,none": 0.06307902816412339,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4834466125849594,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04064211671157527,
						"alias": "kobest",
						"f1,none": 0.38456301490933426,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5855812148263148,
						"acc_stderr,none": 0.026557975805520998,
						"alias": "lambada",
						"perplexity,none": 6.876147978785402,
						"perplexity_stderr,none": 0.8569015450408451
					},
					"lambada_cloze": {
						"acc,none": 0.0392004657481079,
						"acc_stderr,none": 0.00404369449399942,
						"alias": "lambada_cloze",
						"perplexity,none": 739.5322117612484,
						"perplexity_stderr,none": 55.952995788387284
					},
					"lambada_multilingual": {
						"acc,none": 0.38758005045604504,
						"acc_stderr,none": 0.07317611433356291,
						"alias": "lambada_multilingual",
						"perplexity,none": 81.34809795035456,
						"perplexity_stderr,none": 24.14607306286632
					},
					"mmlu": {
						"acc,none": 0.24846887907705456,
						"acc_stderr,none": 0.04022013042020922,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.23209351753453772,
						"acc_stderr,none": 0.028347367229095175,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2832314129385259,
						"acc_stderr,none": 0.045459071097599495,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23756906077348067,
						"acc_stderr,none": 0.03969558736913036,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24928639391056137,
						"acc_stderr,none": 0.03953229114653707,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2928317955997161,
						"acc_norm,none": 0.25977146553829705,
						"acc_norm_stderr,none": 0.00011650390341082355,
						"acc_stderr,none": 0.09161875876814811,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4943571428571429,
						"acc_stderr,none": 0.03665540373436934,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.719695974637384,
						"acc_norm,none": 0.5077893072577018,
						"acc_norm_stderr,none": 0.004366813291240546,
						"acc_stderr,none": 0.14113836749113884,
						"alias": "pythia",
						"bits_per_byte,none": 0.6879124628049069,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6109508328215643,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.209053210087001,
						"perplexity_stderr,none": 0.1247111896496628,
						"word_perplexity,none": 12.80449046297832,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.34397163120567376,
						"acc_norm,none": 0.4219858156028369,
						"acc_norm_stderr,none": 0.055369934581736856,
						"acc_stderr,none": 0.0434497693392195,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.531895777178796,
						"acc_stderr,none": 0.01584511575849003,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3121786388536537,
						"acc_stderr,none": 0.04933363619568943,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.25458996328029376,
						"bleu_acc_stderr,none": 0.0002325660709279805,
						"bleu_diff,none": -8.173978639089455,
						"bleu_diff_stderr,none": 0.4969068574962525,
						"bleu_max,none": 22.47363531042342,
						"bleu_max_stderr,none": 0.48845642318506693,
						"rouge1_acc,none": 0.24969400244798043,
						"rouge1_acc_stderr,none": 0.00022959179851653248,
						"rouge1_diff,none": -10.101351686925462,
						"rouge1_diff_stderr,none": 0.5767620537494614,
						"rouge1_max,none": 48.34050922002757,
						"rouge1_max_stderr,none": 0.6596004139893255,
						"rouge2_acc,none": 0.19706242350061198,
						"rouge2_acc_stderr,none": 0.00019390787346161225,
						"rouge2_diff,none": -12.038921342576732,
						"rouge2_diff_stderr,none": 0.7680879718940578,
						"rouge2_max,none": 31.474065412214365,
						"rouge2_max_stderr,none": 0.8238352594030609,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.00022424545205841482,
						"rougeL_diff,none": -10.428047724405584,
						"rougeL_diff_stderr,none": 0.5891217124707498,
						"rougeL_max,none": 45.18258407066327,
						"rougeL_max_stderr,none": 0.6740874352807857
					},
					"xcopa": {
						"acc,none": 0.5374545454545454,
						"acc_stderr,none": 0.026941327317303136,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.39951807228915664,
						"acc_stderr,none": 0.04755225529958157,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5354070152217075,
						"acc_stderr,none": 0.05167502277162221,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7145425938413127,
						"acc_stderr,none": 0.07117398054154908,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5228297632468997,
						"acc_norm,none": 0.5008455467869222,
						"acc_norm_stderr,none": 0.04310600786168749,
						"acc_stderr,none": 0.05512890966366779,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.015164819103789548,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.331,
						"acc_stderr,none": 0.014888272588203943,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.321,
						"acc_stderr,none": 0.014770821817934647,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3383333333333333,
						"acc_stderr,none": 0.013664144006618268,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2901023890784983,
						"acc_norm,none": 0.3216723549488055,
						"acc_norm_stderr,none": 0.013650488084494164,
						"acc_stderr,none": 0.013261573677520773,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6376262626262627,
						"acc_norm,none": 0.5892255892255892,
						"acc_norm_stderr,none": 0.010095101349348646,
						"acc_stderr,none": 0.009863468202583778,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.01645,
						"acc_stderr,none": 0.016084819586951658,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0295,
						"acc_stderr,none": 0.003784446593361908,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.043,
						"acc_stderr,none": 0.004537156917767891,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0365,
						"acc_stderr,none": 0.004194361850826339,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0505,
						"acc_stderr,none": 0.004897639067368747,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339458,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315794,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.003036876355748373,
						"acc_stderr,none": 0.0011463358249986905,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.840179104477612,
						"acc_stderr,none": 0.14010053936660483,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437733,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.00199699473909873,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370143,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598126,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304412,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357793,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298237,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.0050348137353182325,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118775,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584942,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333344,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611489,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702294,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719097,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.01138950045966554,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409312,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727029,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.368,
						"acc_stderr,none": 0.0152580735615218,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.01001655286669685,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024386,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361423,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341676,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745894,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651523,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140933,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118587,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.015050266127564438,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103293,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.718,
						"acc_stderr,none": 0.014236526215291341,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405752,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524324,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024952,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.01255952792670737,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.0053091606857570035,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578106,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.01060525678479657,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.411,
						"acc_stderr,none": 0.015566673418599271,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118751,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.00733517585370685,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689092,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364457,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259206,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904623,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.01434171135829619,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352801,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696251,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.00627362402111875,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.00909954953840024,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275292,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666699,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.411,
						"acc_stderr,none": 0.01556667341859927,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6431192660550459,
						"acc_stderr,none": 0.008379147807636291,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4107142857142857,
						"acc_stderr,none": 0.06633634150359538,
						"alias": "cb",
						"f1,none": 0.2571428571428571,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10798817715416889,
						"acc_stderr,none": 0.10798817715416889,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757576,
						"acc_stderr,none": 0.07575757575757576,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.06527647182968216,
						"acc_stderr,none": 0.06527647182968216,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.0878051853075513,
						"acc_stderr,none": 0.0878051853075513,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25487825936798475,
						"acc_norm,none": 0.25487825936798475,
						"acc_norm_stderr,none": 0.03540150683357085,
						"acc_stderr,none": 0.03540150683357085,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.033603007963315286,
						"acc_stderr,none": 0.033603007963315286,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.030166316298847997,
						"acc_stderr,none": 0.030166316298847997,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2426470588235294,
						"acc_norm,none": 0.2426470588235294,
						"acc_norm_stderr,none": 0.03689519326996807,
						"acc_stderr,none": 0.03689519326996807,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.04362839933570099,
						"acc_stderr,none": 0.04362839933570099,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.26006191950464397,
						"acc_norm,none": 0.26006191950464397,
						"acc_norm_stderr,none": 0.02444601845721647,
						"acc_stderr,none": 0.02444601845721647,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2737430167597765,
						"acc_norm,none": 0.2737430167597765,
						"acc_norm_stderr,none": 0.03342001835130119,
						"acc_stderr,none": 0.03342001835130119,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.044392639061996274,
						"acc_stderr,none": 0.044392639061996274,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.04133119440243839,
						"acc_stderr,none": 0.04133119440243839,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23443223443223443,
						"acc_norm,none": 0.23443223443223443,
						"acc_norm_stderr,none": 0.025687156459084187,
						"acc_stderr,none": 0.025687156459084187,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.030190282453501954,
						"acc_stderr,none": 0.030190282453501954,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209194,
						"acc_stderr,none": 0.03377310252209194,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.03452055811164904,
						"acc_stderr,none": 0.03452055811164904,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.26262626262626265,
						"acc_norm,none": 0.26262626262626265,
						"acc_norm_stderr,none": 0.031353050095330855,
						"acc_stderr,none": 0.031353050095330855,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2815126050420168,
						"acc_norm,none": 0.2815126050420168,
						"acc_norm_stderr,none": 0.02921354941437216,
						"acc_stderr,none": 0.02921354941437216,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900237,
						"acc_stderr,none": 0.03675137438900237,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.034847315046501876,
						"acc_stderr,none": 0.034847315046501876,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.23728813559322035,
						"acc_norm,none": 0.23728813559322035,
						"acc_norm_stderr,none": 0.039330125499343824,
						"acc_stderr,none": 0.039330125499343824,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900236,
						"acc_stderr,none": 0.03675137438900236,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.23783783783783785,
						"acc_norm,none": 0.23783783783783785,
						"acc_norm_stderr,none": 0.0313873936833048,
						"acc_stderr,none": 0.0313873936833048,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.033611014038904936,
						"acc_stderr,none": 0.033611014038904936,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.03914731903595733,
						"acc_stderr,none": 0.03914731903595733,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25555555555555554,
						"acc_norm,none": 0.25555555555555554,
						"acc_norm_stderr,none": 0.032601103040276455,
						"acc_stderr,none": 0.032601103040276455,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2482758620689655,
						"acc_norm,none": 0.2482758620689655,
						"acc_norm_stderr,none": 0.03600105692727771,
						"acc_stderr,none": 0.03600105692727771,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846666,
						"acc_stderr,none": 0.030469670650846666,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.22988505747126436,
						"acc_norm,none": 0.22988505747126436,
						"acc_norm_stderr,none": 0.03198969467577206,
						"acc_stderr,none": 0.03198969467577206,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593335,
						"acc_stderr,none": 0.03253020905593335,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.03416149068322981,
						"acc_stderr,none": 0.03416149068322981,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.01297429128854566
					},
					"copa": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4993943798449614,
						"likelihood_diff_stderr,none": 0.4046976427894626,
						"pct_stereotype,none": 0.5566487775790101,
						"pct_stereotype_stderr,none": 0.08039935406617489
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4700357781753133,
						"likelihood_diff_stderr,none": 0.08553293385039051,
						"pct_stereotype,none": 0.616577221228384,
						"pct_stereotype_stderr,none": 0.011876697253175876
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.9285714285714284,
						"likelihood_diff_stderr,none": 0.38956328109029437,
						"pct_stereotype,none": 0.6043956043956044,
						"pct_stereotype_stderr,none": 0.05154303032773002
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.590909090909091,
						"likelihood_diff_stderr,none": 1.4578442706117043,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.696153846153846,
						"likelihood_diff_stderr,none": 0.6367994904020663,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.05685286730420954
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.633203125,
						"likelihood_diff_stderr,none": 0.17327489088581116,
						"pct_stereotype,none": 0.60625,
						"pct_stereotype_stderr,none": 0.027355258158219254
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2841435185185186,
						"likelihood_diff_stderr,none": 0.23532717098126185,
						"pct_stereotype,none": 0.5185185185185185,
						"pct_stereotype_stderr,none": 0.034076320938540516
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.734375,
						"likelihood_diff_stderr,none": 0.334147859537437,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.300442913385827,
						"likelihood_diff_stderr,none": 0.14481933582618484,
						"pct_stereotype,none": 0.5492125984251969,
						"pct_stereotype_stderr,none": 0.02209795835867595
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.68018018018018,
						"likelihood_diff_stderr,none": 0.3618662549023483,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.04234321361084539
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.272849462365591,
						"likelihood_diff_stderr,none": 0.42072566930719474,
						"pct_stereotype,none": 0.8279569892473119,
						"pct_stereotype_stderr,none": 0.039348528120618634
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.914473684210526,
						"likelihood_diff_stderr,none": 0.2360758358258764,
						"pct_stereotype,none": 0.6473684210526316,
						"pct_stereotype_stderr,none": 0.03475405259582098
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5282871198568873,
						"likelihood_diff_stderr,none": 0.086043593507212,
						"pct_stereotype,none": 0.49850924269528923,
						"pct_stereotype_stderr,none": 0.012213244933899683
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3333333333333335,
						"likelihood_diff_stderr,none": 0.3403005482967025,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.923076923076923,
						"likelihood_diff_stderr,none": 0.7296296782996299,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.5606060606060606,
						"likelihood_diff_stderr,none": 0.44528940217766544,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.05966637484671758
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.1370716510903427,
						"likelihood_diff_stderr,none": 0.19341258800631192,
						"pct_stereotype,none": 0.5015576323987538,
						"pct_stereotype_stderr,none": 0.02795071408867036
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.092885375494071,
						"likelihood_diff_stderr,none": 0.21698480833183167,
						"pct_stereotype,none": 0.33201581027667987,
						"pct_stereotype_stderr,none": 0.02966621936547489
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.423611111111111,
						"likelihood_diff_stderr,none": 0.4607713350698461,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.1288043478260867,
						"likelihood_diff_stderr,none": 0.1513423226920661,
						"pct_stereotype,none": 0.41304347826086957,
						"pct_stereotype_stderr,none": 0.022982353907431453
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4445652173913044,
						"likelihood_diff_stderr,none": 0.3577863042819299,
						"pct_stereotype,none": 0.6347826086956522,
						"pct_stereotype_stderr,none": 0.04509577025262067
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.3282967032967035,
						"likelihood_diff_stderr,none": 0.35862523437262067,
						"pct_stereotype,none": 0.7472527472527473,
						"pct_stereotype_stderr,none": 0.04580951853732889
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8526785714285716,
						"likelihood_diff_stderr,none": 0.28701110087209086,
						"pct_stereotype,none": 0.5918367346938775,
						"pct_stereotype_stderr,none": 0.03519659177561531
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170044
					},
					"glue": {
						"acc,none": 0.47311529918224926,
						"acc_stderr,none": 0.06487116785467484,
						"alias": "glue",
						"f1,none": 0.34251313536190403,
						"f1_stderr,none": 0.001940375564537564,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.0001683322344400318
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.019711902956785442,
						"exact_match_stderr,get-answer": 0.0038289829787357004
					},
					"hellaswag": {
						"acc,none": 0.4523003385779725,
						"acc_norm,none": 0.5905198167695678,
						"acc_norm_stderr,none": 0.004907329270272706,
						"acc_stderr,none": 0.004967023435680014,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.10210799884493214,
						"acc_norm,none": 0.10210799884493214,
						"acc_norm_stderr,none": 0.06307902816412339,
						"acc_stderr,none": 0.06307902816412339,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.009008893392651532,
						"acc_stderr,none": 0.009008893392651532,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.00853415677333344,
						"acc_stderr,none": 0.00853415677333344,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264368,
						"acc_stderr,none": 0.012510816141264368,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.206,
						"acc_norm,none": 0.206,
						"acc_norm_stderr,none": 0.012795613612786529,
						"acc_stderr,none": 0.012795613612786529,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.01569747382460385,
						"acc_stderr,none": 0.01569747382460385,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.022,
						"acc_norm,none": 0.022,
						"acc_norm_stderr,none": 0.004640855259274701,
						"acc_stderr,none": 0.004640855259274701,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.012,
						"acc_norm,none": 0.012,
						"acc_norm_stderr,none": 0.003444977194099824,
						"acc_stderr,none": 0.003444977194099824,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.029,
						"acc_norm,none": 0.029,
						"acc_norm_stderr,none": 0.005309160685756993,
						"acc_stderr,none": 0.005309160685756993,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.037,
						"acc_norm,none": 0.037,
						"acc_norm_stderr,none": 0.005972157622389647,
						"acc_stderr,none": 0.005972157622389647,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.025,
						"acc_norm,none": 0.025,
						"acc_norm_stderr,none": 0.004939574819698461,
						"acc_stderr,none": 0.004939574819698461,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.005128089049275289,
						"acc_stderr,none": 0.005128089049275289,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.005128089049275292,
						"acc_stderr,none": 0.005128089049275292,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.13,
						"acc_norm,none": 0.13,
						"acc_norm_stderr,none": 0.010640169792499356,
						"acc_stderr,none": 0.010640169792499356,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.112,
						"acc_norm,none": 0.112,
						"acc_norm_stderr,none": 0.009977753031397238,
						"acc_stderr,none": 0.009977753031397238,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.081,
						"acc_norm,none": 0.081,
						"acc_norm_stderr,none": 0.00863212103213996,
						"acc_stderr,none": 0.00863212103213996,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333442,
						"acc_stderr,none": 0.008534156773333442,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.024,
						"acc_norm,none": 0.024,
						"acc_norm_stderr,none": 0.004842256441727051,
						"acc_stderr,none": 0.004842256441727051,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.038,
						"acc_norm,none": 0.038,
						"acc_norm_stderr,none": 0.006049181150584934,
						"acc_stderr,none": 0.006049181150584934,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.059,
						"acc_norm,none": 0.059,
						"acc_norm_stderr,none": 0.00745483565040673,
						"acc_stderr,none": 0.00745483565040673,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074144,
						"acc_stderr,none": 0.012336254828074144,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14333333333333334,
						"acc_norm,none": 0.14333333333333334,
						"acc_norm_stderr,none": 0.014317464782955343,
						"acc_stderr,none": 0.014317464782955343,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.106,
						"acc_norm,none": 0.106,
						"acc_norm_stderr,none": 0.009739551265785134,
						"acc_stderr,none": 0.009739551265785134,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.069,
						"acc_norm,none": 0.069,
						"acc_norm_stderr,none": 0.008018934050315158,
						"acc_stderr,none": 0.008018934050315158,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.086,
						"acc_norm,none": 0.086,
						"acc_norm_stderr,none": 0.008870325962594766,
						"acc_stderr,none": 0.008870325962594766,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660013,
						"acc_stderr,none": 0.013394902889660013,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.052,
						"acc_norm,none": 0.052,
						"acc_norm_stderr,none": 0.007024624213817149,
						"acc_stderr,none": 0.007024624213817149,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.118,
						"acc_norm,none": 0.118,
						"acc_norm_stderr,none": 0.010206869264381795,
						"acc_stderr,none": 0.010206869264381795,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.143,
						"acc_norm,none": 0.143,
						"acc_norm_stderr,none": 0.011075814808567038,
						"acc_stderr,none": 0.011075814808567038,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.154,
						"acc_norm,none": 0.154,
						"acc_norm_stderr,none": 0.011419913065098708,
						"acc_stderr,none": 0.011419913065098708,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.026,
						"acc_norm,none": 0.026,
						"acc_norm_stderr,none": 0.005034813735318245,
						"acc_stderr,none": 0.005034813735318245,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4834466125849594,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04064211671157527,
						"alias": "kobest",
						"f1,none": 0.38456301490933426,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5064102564102564,
						"acc_stderr,none": 0.013347670414620429,
						"alias": " - kobest_boolq",
						"f1,none": 0.35687728164760485,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.015819299929208316,
						"alias": " - kobest_copa",
						"f1,none": 0.4991164414026342,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.342,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.022210326363977417,
						"acc_stderr,none": 0.021236147199899254,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.33838362523396004,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5239294710327456,
						"acc_stderr,none": 0.025097153668550934,
						"alias": " - kobest_sentineg",
						"f1,none": 0.43160693589073473,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5855812148263148,
						"acc_stderr,none": 0.026557975805520998,
						"alias": "lambada",
						"perplexity,none": 6.876147978785402,
						"perplexity_stderr,none": 0.8569015450408451
					},
					"lambada_cloze": {
						"acc,none": 0.0392004657481079,
						"acc_stderr,none": 0.00404369449399942,
						"alias": "lambada_cloze",
						"perplexity,none": 739.5322117612484,
						"perplexity_stderr,none": 55.952995788387284
					},
					"lambada_multilingual": {
						"acc,none": 0.38758005045604504,
						"acc_stderr,none": 0.07317611433356291,
						"alias": "lambada_multilingual",
						"perplexity,none": 81.34809795035456,
						"perplexity_stderr,none": 24.14607306286632
					},
					"lambada_openai": {
						"acc,none": 0.6382689695323113,
						"acc_stderr,none": 0.0066943254346452105,
						"alias": " - lambada_openai",
						"perplexity,none": 5.209053210087001,
						"perplexity_stderr,none": 0.1247111896496628
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04521637880846109,
						"acc_stderr,none": 0.002894759195991714,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 643.5633709874808,
						"perplexity_stderr,none": 26.92313101483762
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2835241606830972,
						"acc_stderr,none": 0.006279251594000099,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 113.05826168328184,
						"perplexity_stderr,none": 6.877590579268446
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6363283524160683,
						"acc_stderr,none": 0.006702046426712479,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.226991460032617,
						"perplexity_stderr,none": 0.12557532726676449
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3101106151756258,
						"acc_stderr,none": 0.006444068085916527,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 108.58731808216238,
						"perplexity_stderr,none": 6.171052955544645
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.3741509800116437,
						"acc_stderr,none": 0.006741713687835725,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 72.24165665866826,
						"perplexity_stderr,none": 4.20352554224905
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.33378614399379003,
						"acc_stderr,none": 0.006569813716190044,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 107.62626186762763,
						"perplexity_stderr,none": 6.632286288964351
					},
					"lambada_standard": {
						"acc,none": 0.5342518921016883,
						"acc_stderr,none": 0.006949613576318102,
						"alias": " - lambada_standard",
						"perplexity,none": 8.547852081016213,
						"perplexity_stderr,none": 0.23548344762053683
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.033184552687754706,
						"acc_stderr,none": 0.0024954670920501465,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 835.501052535016,
						"perplexity_stderr,none": 30.517620104506726
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2589058524173028,
						"exact_match_stderr,get-answer": 0.01105145686861053
					},
					"logiqa": {
						"acc,none": 0.20583717357910905,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.01769054268019078,
						"acc_stderr,none": 0.015858423219323882,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22519083969465647,
						"acc_norm,none": 0.2741730279898219,
						"acc_norm_stderr,none": 0.011254878812320587,
						"acc_stderr,none": 0.010538641739267844,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2629815745393635,
						"acc_norm,none": 0.2529313232830821,
						"acc_norm_stderr,none": 0.007957601054295455,
						"acc_stderr,none": 0.008059394672720415,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3475958483372167,
						"acc_stderr,none": 0.004901021972070537,
						"alias": "mc_taco",
						"f1,none": 0.5011337868480725,
						"f1_stderr,none": 0.005515869658550106
					},
					"medmcqa": {
						"acc,none": 0.2679894812335644,
						"acc_norm,none": 0.2679894812335644,
						"acc_norm_stderr,none": 0.006848974049015552,
						"acc_stderr,none": 0.006848974049015552,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24666142969363708,
						"acc_norm,none": 0.24666142969363708,
						"acc_norm_stderr,none": 0.012086544860415467,
						"acc_stderr,none": 0.012086544860415467,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24846887907705456,
						"acc_stderr,none": 0.04022013042020922,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.03355677216313139,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2490566037735849,
						"acc_stderr,none": 0.02661648298050171,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.032147373020294696,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2936170212765957,
						"acc_stderr,none": 0.02977164271249123,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.19298245614035087,
						"acc_stderr,none": 0.03712454853721368,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.21379310344827587,
						"acc_stderr,none": 0.034165204477475494,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.022019080012217883,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.19047619047619047,
						"acc_stderr,none": 0.035122074123020534,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.25806451612903225,
						"acc_stderr,none": 0.024892469172462836,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.20606060606060606,
						"acc_stderr,none": 0.03158415324047708,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.1919191919191919,
						"acc_stderr,none": 0.028057791672989017,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.20725388601036268,
						"acc_stderr,none": 0.02925282329180363,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.22564102564102564,
						"acc_stderr,none": 0.021193632525148522,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26296296296296295,
						"acc_stderr,none": 0.02684205787383371,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.026653531596715487,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26788990825688075,
						"acc_stderr,none": 0.018987462257978652,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.19907407407407407,
						"acc_stderr,none": 0.027232298462690232,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.02910225438967409,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.20675105485232068,
						"acc_stderr,none": 0.026361651668389083,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.336322869955157,
						"acc_stderr,none": 0.031708824268455,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.25190839694656486,
						"acc_stderr,none": 0.03807387116306085,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.23209351753453772,
						"acc_stderr,none": 0.028347367229095175,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2231404958677686,
						"acc_stderr,none": 0.03800754475228733,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052191,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2147239263803681,
						"acc_stderr,none": 0.03226219377286774,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.23214285714285715,
						"acc_stderr,none": 0.04007341809755806,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.029343114798094438,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.28735632183908044,
						"acc_stderr,none": 0.0161824107306827,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.23699421965317918,
						"acc_stderr,none": 0.02289408248992599,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24692737430167597,
						"acc_stderr,none": 0.014422292204808854,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.02428861946604609,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2832314129385259,
						"acc_stderr,none": 0.045459071097599495,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.18971061093247588,
						"acc_stderr,none": 0.022268196258783218,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25308641975308643,
						"acc_stderr,none": 0.024191808600713,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2333767926988266,
						"acc_stderr,none": 0.01080310848117908,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3897058823529412,
						"acc_stderr,none": 0.0296246635811597,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.26143790849673204,
						"acc_stderr,none": 0.017776947157528044,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.37272727272727274,
						"acc_stderr,none": 0.04631381319425464,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.1836734693877551,
						"acc_stderr,none": 0.02478907133200765,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23756906077348067,
						"acc_stderr,none": 0.03969558736913036,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23383084577114427,
						"acc_stderr,none": 0.029929415408348384,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24928639391056137,
						"acc_stderr,none": 0.03953229114653707,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.040201512610368445,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3072289156626506,
						"acc_stderr,none": 0.035915667978246635,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.27485380116959063,
						"acc_stderr,none": 0.03424042924691584,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.36067244014263883,
						"acc_stderr,none": 0.004847247165239827,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35608218063466235,
						"acc_stderr,none": 0.0048293812786578,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6348039215686274,
						"acc_stderr,none": 0.023866330396787986,
						"alias": "mrpc",
						"f1,none": 0.7738998482549317,
						"f1_stderr,none": 0.018028206018289455
					},
					"multimedqa": {
						"acc,none": 0.2928317955997161,
						"acc_norm,none": 0.25977146553829705,
						"acc_norm_stderr,none": 0.00011650390341082355,
						"acc_stderr,none": 0.09161875876814811,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5705445544554455,
						"acc_stderr,none": 0.007109962816183243,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6630925527073729,
						"mrr_stderr,none": 0.010315273792354897,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43905191873589167,
						"r@2_stderr,none": 0.016681981598282932
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6297027858197016,
						"mrr_stderr,none": 0.010414523654016189,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47404063205417607,
						"r@2_stderr,none": 0.016784648326758043
					},
					"openbookqa": {
						"acc,none": 0.24,
						"acc_norm,none": 0.356,
						"acc_norm_stderr,none": 0.02143471235607264,
						"acc_stderr,none": 0.019118866653759746,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.446,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4225,
						"acc_stderr,none": 0.011047981894987798,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4625,
						"acc_stderr,none": 0.01115163909599229,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957058,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943093,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.537,
						"acc_stderr,none": 0.011152474561478174,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5015,
						"acc_stderr,none": 0.0111830856968392,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4943571428571429,
						"acc_stderr,none": 0.03665540373436934,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7388465723612623,
						"acc_norm,none": 0.735038084874864,
						"acc_norm_stderr,none": 0.010296557993316066,
						"acc_stderr,none": 0.010248738649935557,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.22518146883005977,
						"acc_norm,none": 0.27279035012809566,
						"acc_norm_stderr,none": 0.0032539994073945092,
						"acc_stderr,none": 0.003051683395537989,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.02158898256835354,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.719695974637384,
						"acc_norm,none": 0.5077893072577018,
						"acc_norm_stderr,none": 0.004366813291240546,
						"acc_stderr,none": 0.14113836749113884,
						"alias": "pythia",
						"bits_per_byte,none": 0.6879124628049069,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6109508328215643,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.209053210087001,
						"perplexity_stderr,none": 0.1247111896496628,
						"word_perplexity,none": 12.80449046297832,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.34397163120567376,
						"acc_norm,none": 0.4219858156028369,
						"acc_norm_stderr,none": 0.055369934581736856,
						"acc_stderr,none": 0.0434497693392195,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.43333333333333335,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04542567625794981,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.323943661971831,
						"acc_norm,none": 0.3591549295774648,
						"acc_norm_stderr,none": 0.028518338662384218,
						"acc_stderr,none": 0.027818452695811197,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4975288303130148,
						"acc_stderr,none": 0.00676532792288251,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5199109572099926,
						"acc_stderr,none": 0.0024847281632880526,
						"alias": "qqp",
						"f1,none": 0.33096649662208744,
						"f1_stderr,none": 0.003568693039076901
					},
					"race": {
						"acc,none": 0.33875598086124403,
						"acc_stderr,none": 0.014647857789710087,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.49458483754512633,
						"acc_stderr,none": 0.030094698123239966,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.886,
						"acc_norm,none": 0.833,
						"acc_norm_stderr,none": 0.011800434324644608,
						"acc_stderr,none": 0.010055103435823335,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.4981949458483754,
						"acc_stderr,none": 0.030096267148976626,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.6915137614678899,
						"acc_stderr,none": 0.01564981592304773,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5225432370288914,
						"acc_norm,none": 0.7152854143756873,
						"acc_norm_stderr,none": 0.0031906213347444668,
						"acc_stderr,none": 0.0035314971690702532,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.531895777178796,
						"acc_stderr,none": 0.01584511575849003,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5115184294871795,
						"acc_stderr,none": 0.005002927378867353,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5703861356035269,
						"acc_stderr,none": 0.004983713510652092,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5146078431372549,
						"acc_stderr,none": 0.004948866994549855,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3121786388536537,
						"acc_stderr,none": 0.04933363619568943,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.25458996328029376,
						"bleu_acc_stderr,none": 0.0002325660709279805,
						"bleu_diff,none": -8.173978639089455,
						"bleu_diff_stderr,none": 0.4969068574962525,
						"bleu_max,none": 22.47363531042342,
						"bleu_max_stderr,none": 0.48845642318506693,
						"rouge1_acc,none": 0.24969400244798043,
						"rouge1_acc_stderr,none": 0.00022959179851653248,
						"rouge1_diff,none": -10.101351686925462,
						"rouge1_diff_stderr,none": 0.5767620537494614,
						"rouge1_max,none": 48.34050922002757,
						"rouge1_max_stderr,none": 0.6596004139893255,
						"rouge2_acc,none": 0.19706242350061198,
						"rouge2_acc_stderr,none": 0.00019390787346161225,
						"rouge2_diff,none": -12.038921342576732,
						"rouge2_diff_stderr,none": 0.7680879718940578,
						"rouge2_max,none": 31.474065412214365,
						"rouge2_max_stderr,none": 0.8238352594030609,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.00022424545205841482,
						"rougeL_diff,none": -10.428047724405584,
						"rougeL_diff_stderr,none": 0.5891217124707498,
						"rougeL_max,none": 45.18258407066327,
						"rougeL_max_stderr,none": 0.6740874352807857
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.25458996328029376,
						"bleu_acc_stderr,none": 0.015250117079156491,
						"bleu_diff,none": -8.173978639089455,
						"bleu_diff_stderr,none": 0.7049162060105105,
						"bleu_max,none": 22.47363531042342,
						"bleu_max_stderr,none": 0.6988965754566744,
						"rouge1_acc,none": 0.24969400244798043,
						"rouge1_acc_stderr,none": 0.015152286907148125,
						"rouge1_diff,none": -10.101351686925462,
						"rouge1_diff_stderr,none": 0.7594485194859896,
						"rouge1_max,none": 48.34050922002757,
						"rouge1_max_stderr,none": 0.8121578750398998,
						"rouge2_acc,none": 0.19706242350061198,
						"rouge2_acc_stderr,none": 0.013925080734473759,
						"rouge2_diff,none": -12.038921342576732,
						"rouge2_diff_stderr,none": 0.8764062824364381,
						"rouge2_max,none": 31.474065412214365,
						"rouge2_max_stderr,none": 0.907653711171315,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.014974827279752339,
						"rougeL_diff,none": -10.428047724405584,
						"rougeL_diff_stderr,none": 0.767542645375975,
						"rougeL_max,none": 45.18258407066327,
						"rougeL_max_stderr,none": 0.8210282792211153
					},
					"truthfulqa_mc1": {
						"acc,none": 0.211750305997552,
						"acc_stderr,none": 0.014302068353925612,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3623928052817046,
						"acc_stderr,none": 0.013610142013582344,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170044
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6879124628049069,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6109508328215643,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.80449046297832,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5824782951854776,
						"acc_stderr,none": 0.013859978264440251,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4084507042253521,
						"acc_stderr,none": 0.05875113694257525,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7875457875457875,
						"acc_stderr,none": 0.024801967135031428,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5374545454545454,
						"acc_stderr,none": 0.026941327317303136,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207888,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.022231970696321122,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.02234794983266809,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.39951807228915664,
						"acc_stderr,none": 0.04755225529958157,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956477,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3606425702811245,
						"acc_stderr,none": 0.009624937202075304,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169668,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38835341365461845,
						"acc_stderr,none": 0.009769028875673288,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5301204819277109,
						"acc_stderr,none": 0.010003871419517746,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.45582329317269077,
						"acc_stderr,none": 0.009982878443738434,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4650602409638554,
						"acc_stderr,none": 0.009997573294114558,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.36305220883534134,
						"acc_stderr,none": 0.009638823133984984,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43734939759036146,
						"acc_stderr,none": 0.009943086374983841,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335291,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3755020080321285,
						"acc_stderr,none": 0.009706422844379822,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.38795180722891565,
						"acc_stderr,none": 0.00976718134658639,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301816,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.38755020080321284,
						"acc_stderr,none": 0.009765326832218986,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3393574297188755,
						"acc_stderr,none": 0.009490727635646758,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5354070152217075,
						"acc_stderr,none": 0.05167502277162221,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.485771012574454,
						"acc_stderr,none": 0.012861913999596122,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6882859033752482,
						"acc_stderr,none": 0.01191994318039934,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.012862387586650075,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5109199205823958,
						"acc_stderr,none": 0.012864056278255038,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5076108537392455,
						"acc_stderr,none": 0.012865634571114483,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49106551952349436,
						"acc_stderr,none": 0.012865070917320797,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5373924553275976,
						"acc_stderr,none": 0.012831093347016553,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.012864417047980475,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126678,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5301125082726671,
						"acc_stderr,none": 0.012843769248432169,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7145425938413127,
						"acc_stderr,none": 0.07117398054154908,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.824516129032258,
						"acc_stderr,none": 0.007890419963068872,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6144578313253012,
						"acc_stderr,none": 0.0537495779731939,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5683003128258602,
						"acc_stderr,none": 0.01600283988588422,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6273764258555133,
						"acc_stderr,none": 0.02987092117457781,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6063492063492063,
						"acc_stderr,none": 0.027570976518916924,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6150793650793651,
						"acc_stderr,none": 0.0216953760835214,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "EleutherAI/pythia-2.8b"
	},
	"EleutherAI/pythia-6.9b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5512965050732808,
						"acc_norm,none": 0.5231116121758738,
						"acc_norm_stderr,none": 0.04067358767317326,
						"acc_stderr,none": 0.054819797821025196,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.014704924886248286,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0271,
						"acc_stderr,none": 0.02575404424225429,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8348507462686567,
						"acc_stderr,none": 0.14844310370094302,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10736675941875201,
						"acc_stderr,none": 0.10736675941875201,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25030219305819357,
						"acc_norm,none": 0.25030219305819357,
						"acc_norm_stderr,none": 0.03587964052027906,
						"acc_stderr,none": 0.03587964052027906,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5168362403100777,
						"likelihood_diff_stderr,none": 0.44199934465292684,
						"pct_stereotype,none": 0.5691711389385808,
						"pct_stereotype_stderr,none": 0.08301646280854093
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.018208661417322834,
						"exact_match_stderr,none": 0.0029668367280168864
					},
					"glue": {
						"acc,none": 0.4765292994461509,
						"acc_stderr,none": 0.05922961391830872,
						"alias": "glue",
						"f1,none": 0.30815310544040614,
						"f1_stderr,none": 0.0023964122096718178,
						"mcc,none": -0.010723265836973498,
						"mcc_stderr,none": 0.0009487459569365491
					},
					"kmmlu": {
						"acc,none": 0.19275194917701416,
						"acc_norm,none": 0.19275194917701416,
						"acc_norm_stderr,none": 0.031843847251752495,
						"acc_stderr,none": 0.031843847251752495,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.0004971222444889777,
						"acc_stderr,none": 0.04108291451305947,
						"alias": "kobest",
						"f1,none": 0.3856501304501679,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5620997477197749,
						"acc_stderr,none": 0.022108223425668115,
						"alias": "lambada",
						"perplexity,none": 7.169945572792913,
						"perplexity_stderr,none": 0.7220162901262329
					},
					"lambada_cloze": {
						"acc,none": 0.03483407723656123,
						"acc_stderr,none": 0.003297610989989496,
						"alias": "lambada_cloze",
						"perplexity,none": 649.9961081254019,
						"perplexity_stderr,none": 139.9723628603054
					},
					"lambada_multilingual": {
						"acc,none": 0.36743644478944304,
						"acc_stderr,none": 0.06807273135902886,
						"alias": "lambada_multilingual",
						"perplexity,none": 85.61923274283521,
						"perplexity_stderr,none": 24.934559721383756
					},
					"mmlu": {
						"acc,none": 0.25943597778094285,
						"acc_stderr,none": 0.03725574885708458,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.26588735387885226,
						"acc_stderr,none": 0.03326435372325513,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2594142259414226,
						"acc_stderr,none": 0.03711469955148084,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24991875203119923,
						"acc_stderr,none": 0.03470814656840191,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2591183000317159,
						"acc_stderr,none": 0.044085467609407146,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2715400993612491,
						"acc_norm,none": 0.24405994633802958,
						"acc_norm_stderr,none": 0.00010198073109277196,
						"acc_stderr,none": 0.08053490267578424,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4842142857142857,
						"acc_stderr,none": 0.03733481753135015,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7178574341080349,
						"acc_norm,none": 0.5297237265864374,
						"acc_norm_stderr,none": 0.004084907082106431,
						"acc_stderr,none": 0.1451975419135961,
						"alias": "pythia",
						"bits_per_byte,none": 0.6682266692073346,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5891184551193365,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.826747737520837,
						"perplexity_stderr,none": 0.14221434957812212,
						"word_perplexity,none": 11.903460406155345,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3617021276595745,
						"acc_norm,none": 0.42730496453900707,
						"acc_norm_stderr,none": 0.044987955343396835,
						"acc_stderr,none": 0.036851470460679096,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5458054640444577,
						"acc_stderr,none": 0.02300027472262574,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.31303129745086594,
						"acc_stderr,none": 0.046447592598135676,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.000254245212060339,
						"bleu_diff,none": -7.5502972003150015,
						"bleu_diff_stderr,none": 0.6251384286071874,
						"bleu_max,none": 23.937877059675927,
						"bleu_max_stderr,none": 0.5556507830600694,
						"rouge1_acc,none": 0.27539779681762544,
						"rouge1_acc_stderr,none": 0.00024455128716375303,
						"rouge1_diff,none": -9.951319250584817,
						"rouge1_diff_stderr,none": 0.7587933284946611,
						"rouge1_max,none": 49.46546723267731,
						"rouge1_max_stderr,none": 0.7162540556382557,
						"rouge2_acc,none": 0.22031823745410037,
						"rouge2_acc_stderr,none": 0.00021051239178825868,
						"rouge2_diff,none": -11.920053942587268,
						"rouge2_diff_stderr,none": 1.078298560970079,
						"rouge2_max,none": 32.350539226572835,
						"rouge2_max_stderr,none": 0.9420036522288837,
						"rougeL_acc,none": 0.24724602203182375,
						"rougeL_acc_stderr,none": 0.00022808263066331178,
						"rougeL_diff,none": -10.329155354269277,
						"rougeL_diff_stderr,none": 0.7715405811679985,
						"rougeL_max,none": 46.412207003176725,
						"rougeL_max_stderr,none": 0.730402657587926
					},
					"xcopa": {
						"acc,none": 0.5405454545454547,
						"acc_stderr,none": 0.029688555203892242,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3997054886211513,
						"acc_stderr,none": 0.05058781670580755,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5417243246495398,
						"acc_stderr,none": 0.05362261906116568,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7091481231737469,
						"acc_stderr,none": 0.0723679949993312,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5512965050732808,
						"acc_norm,none": 0.5231116121758738,
						"acc_norm_stderr,none": 0.04067358767317326,
						"acc_stderr,none": 0.054819797821025196,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.014704924886248286,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932572,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456736,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.013680495725767789,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3199658703071672,
						"acc_norm,none": 0.35494880546075086,
						"acc_norm_stderr,none": 0.013983036904094095,
						"acc_stderr,none": 0.013631345807016196,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6654040404040404,
						"acc_norm,none": 0.6060606060606061,
						"acc_norm_stderr,none": 0.010026305355981823,
						"acc_stderr,none": 0.009682137724327912,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0271,
						"acc_stderr,none": 0.02575404424225429,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.056,
						"acc_stderr,none": 0.005142491867889057,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0515,
						"acc_stderr,none": 0.004943287675881555,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.055,
						"acc_stderr,none": 0.005099068566917319,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0915,
						"acc_stderr,none": 0.00644861747459616,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.0019296986470519835,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0085,
						"acc_stderr,none": 0.002053285901060994,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000068,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004338394793926247,
						"acc_stderr,none": 0.0013692387389319645,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8348507462686567,
						"acc_stderr,none": 0.14844310370094302,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942319,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329825,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.00244335219932984,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042095,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745918,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177451,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661773,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528009,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565487,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280302,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817145,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697075,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752503,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651537,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656797,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617333,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.01337497251922006,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617333,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632168,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752505,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.01094526376104296,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727026,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.262,
						"acc_stderr,none": 0.013912208651021347,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151105,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042092,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361427,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665526,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910609,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397344,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796393,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745918,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319325,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397361,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.0156679444881735,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.015110404505648663,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481747,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745911,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.655,
						"acc_stderr,none": 0.015039986742055238,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333449,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333454,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103758,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584931,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578106,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345691,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816336,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.015780495050030152,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426127,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745889,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403626,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145148,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.01580397942816195,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783226,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01407885699246262,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504413,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.011743632866916164,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946094,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163046,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832025,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306516,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452371,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.438,
						"acc_stderr,none": 0.01569721001969469,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.303,
						"acc_stderr,none": 0.014539683710535245,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6220183486238532,
						"acc_stderr,none": 0.008480656964585253,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.06672848092813058,
						"alias": "cb",
						"f1,none": 0.21956970232832304,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10736675941875201,
						"acc_stderr,none": 0.10736675941875201,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122595,
						"acc_stderr,none": 0.08503766788122595,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.045454545454545456,
						"acc_norm,none": 0.045454545454545456,
						"acc_norm_stderr,none": 0.045454545454545456,
						"acc_stderr,none": 0.045454545454545456,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.05555555555555555,
						"acc_norm,none": 0.05555555555555555,
						"acc_norm_stderr,none": 0.05555555555555556,
						"acc_stderr,none": 0.05555555555555556,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.07138609234576077,
						"acc_stderr,none": 0.07138609234576077,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828227,
						"acc_stderr,none": 0.05589005688828227,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.06712194885164874,
						"acc_stderr,none": 0.06712194885164874,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.06007385040937022,
						"acc_stderr,none": 0.06007385040937022,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25030219305819357,
						"acc_norm,none": 0.25030219305819357,
						"acc_norm_stderr,none": 0.03587964052027906,
						"acc_stderr,none": 0.03587964052027906,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2679425837320574,
						"acc_norm,none": 0.2679425837320574,
						"acc_norm_stderr,none": 0.030708724295561363,
						"acc_stderr,none": 0.030708724295561363,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.036507817107892686,
						"acc_stderr,none": 0.036507817107892686,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633094,
						"acc_stderr,none": 0.042520162237633094,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23443223443223443,
						"acc_norm,none": 0.23443223443223443,
						"acc_norm_stderr,none": 0.025687156459084187,
						"acc_stderr,none": 0.025687156459084187,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.030190282453501954,
						"acc_stderr,none": 0.030190282453501954,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.033773102522091945,
						"acc_stderr,none": 0.033773102522091945,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.036948460554439046,
						"acc_stderr,none": 0.036948460554439046,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683853,
						"acc_stderr,none": 0.028145741115683853,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.20707070707070707,
						"acc_norm,none": 0.20707070707070707,
						"acc_norm_stderr,none": 0.02886977846026705,
						"acc_stderr,none": 0.02886977846026705,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.02934457250063434,
						"acc_stderr,none": 0.02934457250063434,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24782608695652175,
						"acc_norm,none": 0.24782608695652175,
						"acc_norm_stderr,none": 0.02853086259541007,
						"acc_stderr,none": 0.02853086259541007,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066656,
						"acc_stderr,none": 0.03785714465066656,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.0357250214181557,
						"acc_stderr,none": 0.0357250214181557,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2215909090909091,
						"acc_norm,none": 0.2215909090909091,
						"acc_norm_stderr,none": 0.03139502946092615,
						"acc_stderr,none": 0.03139502946092615,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594346,
						"acc_stderr,none": 0.03518627932594346,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.036751374389002375,
						"acc_stderr,none": 0.036751374389002375,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.03809523809523811,
						"acc_stderr,none": 0.03809523809523811,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.033611014038904936,
						"acc_stderr,none": 0.033611014038904936,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.27049180327868855,
						"acc_norm,none": 0.27049180327868855,
						"acc_norm_stderr,none": 0.04038308168357442,
						"acc_stderr,none": 0.04038308168357442,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.029461344042368914,
						"acc_stderr,none": 0.029461344042368914,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25555555555555554,
						"acc_norm,none": 0.25555555555555554,
						"acc_norm_stderr,none": 0.032601103040276455,
						"acc_stderr,none": 0.032601103040276455,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.031063241573973475,
						"acc_stderr,none": 0.031063241573973475,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.03695183311650232,
						"acc_stderr,none": 0.03695183311650232,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.03046967065084667,
						"acc_stderr,none": 0.03046967065084667,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23670212765957446,
						"acc_norm,none": 0.23670212765957446,
						"acc_norm_stderr,none": 0.021949896304751578,
						"acc_stderr,none": 0.021949896304751578,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.027804360209961736,
						"acc_stderr,none": 0.027804360209961736,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.02895216745089081,
						"acc_stderr,none": 0.02895216745089081,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.22424242424242424,
						"acc_norm,none": 0.22424242424242424,
						"acc_norm_stderr,none": 0.03256866661681102,
						"acc_stderr,none": 0.03256866661681102,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.035410885580708956,
						"acc_stderr,none": 0.035410885580708956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.010723265836973498,
						"mcc_stderr,none": 0.030801720032111016
					},
					"copa": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5168362403100777,
						"likelihood_diff_stderr,none": 0.44199934465292684,
						"pct_stereotype,none": 0.5691711389385808,
						"pct_stereotype_stderr,none": 0.08301646280854093
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4827817531305905,
						"likelihood_diff_stderr,none": 0.08593319633901526,
						"pct_stereotype,none": 0.6237328562909958,
						"pct_stereotype_stderr,none": 0.011833424715510596
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.9642857142857144,
						"likelihood_diff_stderr,none": 0.4109464757490657,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.2727272727272725,
						"likelihood_diff_stderr,none": 1.2807377780788491,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.286538461538462,
						"likelihood_diff_stderr,none": 0.6350260908771186,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.05685286730420954
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.528515625,
						"likelihood_diff_stderr,none": 0.16655311008461673,
						"pct_stereotype,none": 0.63125,
						"pct_stereotype_stderr,none": 0.02701290980694683
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5127314814814814,
						"likelihood_diff_stderr,none": 0.23539641361399832,
						"pct_stereotype,none": 0.5370370370370371,
						"pct_stereotype_stderr,none": 0.03400603625538272
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7586805555555554,
						"likelihood_diff_stderr,none": 0.3950629269948709,
						"pct_stereotype,none": 0.6944444444444444,
						"pct_stereotype_stderr,none": 0.05466818705978919
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.2002952755905514,
						"likelihood_diff_stderr,none": 0.1403370362594733,
						"pct_stereotype,none": 0.5374015748031497,
						"pct_stereotype_stderr,none": 0.022143566088969842
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.4695945945945947,
						"likelihood_diff_stderr,none": 0.32358499456962947,
						"pct_stereotype,none": 0.7477477477477478,
						"pct_stereotype_stderr,none": 0.04140938118194942
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.327956989247312,
						"likelihood_diff_stderr,none": 0.4253746749065914,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.1703947368421055,
						"likelihood_diff_stderr,none": 0.24925915545353275,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5374925462134765,
						"likelihood_diff_stderr,none": 0.08570871063293013,
						"pct_stereotype,none": 0.5116279069767442,
						"pct_stereotype_stderr,none": 0.012209996095069644
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.6152777777777776,
						"likelihood_diff_stderr,none": 0.3355859500107813,
						"pct_stereotype,none": 0.4888888888888889,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.1538461538461537,
						"likelihood_diff_stderr,none": 0.7550781403454966,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.799242424242424,
						"likelihood_diff_stderr,none": 0.47440981912807306,
						"pct_stereotype,none": 0.6818181818181818,
						"pct_stereotype_stderr,none": 0.057771719027476576
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.3633177570093458,
						"likelihood_diff_stderr,none": 0.19118156624856716,
						"pct_stereotype,none": 0.5327102803738317,
						"pct_stereotype_stderr,none": 0.027890972865217984
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.9822134387351777,
						"likelihood_diff_stderr,none": 0.2195585847814602,
						"pct_stereotype,none": 0.3359683794466403,
						"pct_stereotype_stderr,none": 0.029753859790872788
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.0868055555555554,
						"likelihood_diff_stderr,none": 0.4924895567154129,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.05820650942569533
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0714673913043478,
						"likelihood_diff_stderr,none": 0.15221095815191343,
						"pct_stereotype,none": 0.43043478260869567,
						"pct_stereotype_stderr,none": 0.023111017495849553
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.767391304347826,
						"likelihood_diff_stderr,none": 0.33563358319098824,
						"pct_stereotype,none": 0.6782608695652174,
						"pct_stereotype_stderr,none": 0.04375199868936841
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.693681318681319,
						"likelihood_diff_stderr,none": 0.3786869209485132,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.04441155916843277
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.9926658163265305,
						"likelihood_diff_stderr,none": 0.2732482316703294,
						"pct_stereotype,none": 0.6224489795918368,
						"pct_stereotype_stderr,none": 0.03471541794449721
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.018208661417322834,
						"exact_match_stderr,none": 0.0029668367280168864
					},
					"glue": {
						"acc,none": 0.4765292994461509,
						"acc_stderr,none": 0.05922961391830872,
						"alias": "glue",
						"f1,none": 0.30815310544040614,
						"f1_stderr,none": 0.0023964122096718178,
						"mcc,none": -0.010723265836973498,
						"mcc_stderr,none": 0.0009487459569365491
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.018953752843062926,
						"exact_match_stderr,get-answer": 0.003756078341031472
					},
					"hellaswag": {
						"acc,none": 0.47679745070703045,
						"acc_norm,none": 0.6320454092810197,
						"acc_norm_stderr,none": 0.004812633280078257,
						"acc_stderr,none": 0.004984405935541098,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.19275194917701416,
						"acc_norm,none": 0.19275194917701416,
						"acc_norm_stderr,none": 0.031843847251752495,
						"acc_stderr,none": 0.031843847251752495,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.03775251680686371,
						"acc_stderr,none": 0.03775251680686371,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.012411851354816329,
						"acc_stderr,none": 0.012411851354816329,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.223,
						"acc_norm,none": 0.223,
						"acc_norm_stderr,none": 0.013169830843425667,
						"acc_stderr,none": 0.013169830843425667,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.216,
						"acc_norm,none": 0.216,
						"acc_norm_stderr,none": 0.01301973553930781,
						"acc_stderr,none": 0.01301973553930781,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.01806848202433441,
						"acc_stderr,none": 0.01806848202433441,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.011715000693181328,
						"acc_stderr,none": 0.011715000693181328,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.173,
						"acc_norm,none": 0.173,
						"acc_norm_stderr,none": 0.011967214137559933,
						"acc_stderr,none": 0.011967214137559933,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.134,
						"acc_norm,none": 0.134,
						"acc_norm_stderr,none": 0.010777762298369678,
						"acc_stderr,none": 0.010777762298369678,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.134,
						"acc_norm,none": 0.134,
						"acc_norm_stderr,none": 0.010777762298369678,
						"acc_stderr,none": 0.010777762298369678,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.35384615384615387,
						"acc_norm,none": 0.35384615384615387,
						"acc_norm_stderr,none": 0.042099830898262615,
						"acc_stderr,none": 0.042099830898262615,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024387,
						"acc_stderr,none": 0.012749374359024387,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.012655439943366648,
						"acc_stderr,none": 0.012655439943366648,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661785,
						"acc_stderr,none": 0.013106173040661785,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541672,
						"acc_stderr,none": 0.011884495834541672,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264357,
						"acc_stderr,none": 0.012510816141264357,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.181,
						"acc_norm,none": 0.181,
						"acc_norm_stderr,none": 0.012181436179177909,
						"acc_stderr,none": 0.012181436179177909,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.0124862687343701,
						"acc_stderr,none": 0.0124862687343701,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370101,
						"acc_stderr,none": 0.012486268734370101,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541663,
						"acc_stderr,none": 0.011884495834541663,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.012021627157731985,
						"acc_stderr,none": 0.012021627157731985,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644596,
						"acc_stderr,none": 0.011800434324644596,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.01355063170555596,
						"acc_stderr,none": 0.01355063170555596,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074121,
						"acc_stderr,none": 0.012336254828074121,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364466,
						"acc_stderr,none": 0.013190830072364466,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.016343556928908817,
						"acc_stderr,none": 0.016343556928908817,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.153,
						"acc_norm,none": 0.153,
						"acc_norm_stderr,none": 0.011389500459665549,
						"acc_stderr,none": 0.011389500459665549,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.174,
						"acc_norm,none": 0.174,
						"acc_norm_stderr,none": 0.011994493230973435,
						"acc_stderr,none": 0.011994493230973435,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763935,
						"acc_stderr,none": 0.013253174964763935,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644607,
						"acc_stderr,none": 0.011800434324644607,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.045126085985421276,
						"acc_stderr,none": 0.045126085985421276,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555967,
						"acc_stderr,none": 0.013550631705555967,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.173,
						"acc_norm,none": 0.173,
						"acc_norm_stderr,none": 0.011967214137559934,
						"acc_stderr,none": 0.011967214137559934,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264347,
						"acc_stderr,none": 0.012510816141264347,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.027809473820460097,
						"acc_stderr,none": 0.027809473820460097,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274559,
						"acc_stderr,none": 0.012886662332274559,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.01308473195026201,
						"acc_stderr,none": 0.01308473195026201,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.012655439943366653,
						"acc_stderr,none": 0.012655439943366653,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.0004971222444889777,
						"acc_stderr,none": 0.04108291451305947,
						"alias": "kobest",
						"f1,none": 0.3856501304501679,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5014245014245015,
						"acc_stderr,none": 0.013348713323588866,
						"alias": " - kobest_boolq",
						"f1,none": 0.33523266856600187,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.015817749561843567,
						"alias": " - kobest_copa",
						"f1,none": 0.5063241577719899,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.342,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.022296238348407063,
						"acc_stderr,none": 0.021236147199899254,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3400257873978433,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5314861460957179,
						"acc_stderr,none": 0.025076077305681316,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5004194630872483,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5620997477197749,
						"acc_stderr,none": 0.022108223425668115,
						"alias": "lambada",
						"perplexity,none": 7.169945572792913,
						"perplexity_stderr,none": 0.7220162901262329
					},
					"lambada_cloze": {
						"acc,none": 0.03483407723656123,
						"acc_stderr,none": 0.003297610989989496,
						"alias": "lambada_cloze",
						"perplexity,none": 649.9961081254019,
						"perplexity_stderr,none": 139.9723628603054
					},
					"lambada_multilingual": {
						"acc,none": 0.36743644478944304,
						"acc_stderr,none": 0.06807273135902886,
						"alias": "lambada_multilingual",
						"perplexity,none": 85.61923274283521,
						"perplexity_stderr,none": 24.934559721383756
					},
					"lambada_openai": {
						"acc,none": 0.6089656510770425,
						"acc_stderr,none": 0.006798544197091018,
						"alias": " - lambada_openai",
						"perplexity,none": 5.826747737520837,
						"perplexity_stderr,none": 0.14221434957812212
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03066175043663885,
						"acc_stderr,none": 0.002401862934162414,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 374.84449335532895,
						"perplexity_stderr,none": 12.566603378638536
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2889578886085775,
						"acc_stderr,none": 0.006315053173776882,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 111.70921565884652,
						"perplexity_stderr,none": 6.7130825671387395
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6037259848631865,
						"acc_stderr,none": 0.006814434238262819,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.776225342797313,
						"perplexity_stderr,none": 0.14023382949276372
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2848825926644673,
						"acc_stderr,none": 0.006288306538252616,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 115.041411283382,
						"perplexity_stderr,none": 6.4558637110495605
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.34892295750048513,
						"acc_stderr,none": 0.006640381581831473,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 79.00193299360477,
						"perplexity_stderr,none": 4.502315563797141
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.31069280031049873,
						"acc_stderr,none": 0.00644739198299003,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 116.56737843554541,
						"perplexity_stderr,none": 7.125874424310032
					},
					"lambada_standard": {
						"acc,none": 0.5200853871531147,
						"acc_stderr,none": 0.006960354919832297,
						"alias": " - lambada_standard",
						"perplexity,none": 8.56096075664032,
						"perplexity_stderr,none": 0.23510920332195967
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0390064040364836,
						"acc_stderr,none": 0.0026973684726303033,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 925.1477228954748,
						"perplexity_stderr,none": 34.18852178433885
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.25318066157760816,
						"exact_match_stderr,get-answer": 0.010970715436423053
					},
					"logiqa": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.017803862148538012,
						"acc_stderr,none": 0.016705867034419633,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22391857506361323,
						"acc_norm,none": 0.27353689567430023,
						"acc_norm_stderr,none": 0.011246739746251146,
						"acc_stderr,none": 0.01051745379720071,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.25896147403685094,
						"acc_norm_stderr,none": 0.008019338828219912,
						"acc_stderr,none": 0.008029434758777933,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4406905316670197,
						"acc_stderr,none": 0.0051095652169729075,
						"alias": "mc_taco",
						"f1,none": 0.4968080038113387,
						"f1_stderr,none": 0.005986982291873034
					},
					"medmcqa": {
						"acc,none": 0.2505378914654554,
						"acc_norm,none": 0.2505378914654554,
						"acc_norm_stderr,none": 0.006700690136363488,
						"acc_stderr,none": 0.006700690136363488,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.23409269442262373,
						"acc_norm,none": 0.23409269442262373,
						"acc_norm_stderr,none": 0.011872398915414776,
						"acc_stderr,none": 0.011872398915414776,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25943597778094285,
						"acc_stderr,none": 0.03725574885708458,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.024618298195866518,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.16,
						"acc_stderr,none": 0.0368452949177471,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.0321473730202947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2127659574468085,
						"acc_stderr,none": 0.02675439134803977,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.04372748290278008,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309993,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.022569897074918417,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0404061017820884,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24838709677419354,
						"acc_stderr,none": 0.024580028921481003,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.031270907132976984,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.03546563019624337,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2828282828282828,
						"acc_stderr,none": 0.03208779558786752,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.23316062176165803,
						"acc_stderr,none": 0.03051611137147601,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2205128205128205,
						"acc_stderr,none": 0.02102067268082791,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.02696242432507383,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.22268907563025211,
						"acc_stderr,none": 0.027025433498882374,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24403669724770644,
						"acc_stderr,none": 0.018415286351416392,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791037,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.03077855467869326,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2742616033755274,
						"acc_stderr,none": 0.029041333510598028,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.242152466367713,
						"acc_stderr,none": 0.028751392398694755,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.26588735387885226,
						"acc_stderr,none": 0.03326435372325513,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.04320767807536671,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.23148148148148148,
						"acc_stderr,none": 0.04077494709252626,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.24539877300613497,
						"acc_stderr,none": 0.03380939813943354,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.23214285714285715,
						"acc_stderr,none": 0.04007341809755806,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2621359223300971,
						"acc_stderr,none": 0.04354631077260594,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.27350427350427353,
						"acc_stderr,none": 0.029202540153431197,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2796934865900383,
						"acc_stderr,none": 0.016050792148036543,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.024685316867257806,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.01431099954796146,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.02463004897982478,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2594142259414226,
						"acc_stderr,none": 0.03711469955148084,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2990353697749196,
						"acc_stderr,none": 0.02600330111788513,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2808641975308642,
						"acc_stderr,none": 0.02500646975579921,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.026577860943307854,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25097783572359844,
						"acc_stderr,none": 0.011073730299187234,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.24632352941176472,
						"acc_stderr,none": 0.02617343857052,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.272875816993464,
						"acc_stderr,none": 0.01802047414839358,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.04350271442923243,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24081632653061225,
						"acc_stderr,none": 0.02737294220178816,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24991875203119923,
						"acc_stderr,none": 0.03470814656840191,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.263681592039801,
						"acc_stderr,none": 0.031157150869355568,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2591183000317159,
						"acc_stderr,none": 0.044085467609407146,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.0355092018568963,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.035650796707083106,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.37004584819154357,
						"acc_stderr,none": 0.004873704709776533,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37164361269324653,
						"acc_stderr,none": 0.004873797777343965,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6764705882352942,
						"acc_stderr,none": 0.02318911310940354,
						"alias": "mrpc",
						"f1,none": 0.8064516129032258,
						"f1_stderr,none": 0.01650839106747271
					},
					"multimedqa": {
						"acc,none": 0.2715400993612491,
						"acc_norm,none": 0.24405994633802958,
						"acc_norm_stderr,none": 0.00010198073109277196,
						"acc_stderr,none": 0.08053490267578424,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.571575907590759,
						"acc_stderr,none": 0.007107835859605345,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6734386774339203,
						"mrr_stderr,none": 0.01027474604918485,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43792325056433407,
						"r@2_stderr,none": 0.01667727833407506
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6285741177826918,
						"mrr_stderr,none": 0.010297012853983163,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4785553047404063,
						"r@2_stderr,none": 0.016791850493288407
					},
					"openbookqa": {
						"acc,none": 0.248,
						"acc_norm,none": 0.366,
						"acc_norm_stderr,none": 0.021564276850201614,
						"acc_stderr,none": 0.019332342821239103,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4305,
						"acc_stderr,none": 0.011074574398099854,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4085,
						"acc_stderr,none": 0.010994285431808401,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4745,
						"acc_stderr,none": 0.011168582883330074,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5405,
						"acc_stderr,none": 0.011146389370464352,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.01117776123260332,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.011139750761283315,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.011182040020027772,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4842142857142857,
						"acc_stderr,none": 0.03733481753135015,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7475516866158868,
						"acc_norm,none": 0.7595212187159956,
						"acc_norm_stderr,none": 0.00997134536465107,
						"acc_stderr,none": 0.01013566554736236,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25426985482493597,
						"acc_norm,none": 0.2878415883859949,
						"acc_norm_stderr,none": 0.003307792260514475,
						"acc_stderr,none": 0.003181349906864455,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7178574341080349,
						"acc_norm,none": 0.5297237265864374,
						"acc_norm_stderr,none": 0.004084907082106431,
						"acc_stderr,none": 0.1451975419135961,
						"alias": "pythia",
						"bits_per_byte,none": 0.6682266692073346,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5891184551193365,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.826747737520837,
						"perplexity_stderr,none": 0.14221434957812212,
						"word_perplexity,none": 11.903460406155345,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3617021276595745,
						"acc_norm,none": 0.42730496453900707,
						"acc_norm_stderr,none": 0.044987955343396835,
						"acc_stderr,none": 0.036851470460679096,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.045809453927047654,
						"acc_stderr,none": 0.04490887131390718,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.0378261498181204,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.352112676056338,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.028908177688046176,
						"acc_stderr,none": 0.028392089391036893,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5200439319055464,
						"acc_stderr,none": 0.006759972234057624,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5144694533762058,
						"acc_stderr,none": 0.0024856591570400557,
						"alias": "qqp",
						"f1,none": 0.30325832327678,
						"f1_stderr,none": 0.0035584186090726992
					},
					"race": {
						"acc,none": 0.369377990430622,
						"acc_stderr,none": 0.014937221457864277,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.592057761732852,
						"acc_stderr,none": 0.029581952519606197,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.889,
						"acc_norm,none": 0.832,
						"acc_norm_stderr,none": 0.011828605831454264,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5956678700361011,
						"acc_stderr,none": 0.02954042051761972,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.6192660550458715,
						"acc_stderr,none": 0.01645282049019051,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5367889633110067,
						"acc_norm,none": 0.7302309307207837,
						"acc_norm_stderr,none": 0.0031380286998276253,
						"acc_stderr,none": 0.0035255100770811704,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5458054640444577,
						"acc_stderr,none": 0.02300027472262574,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5406650641025641,
						"acc_stderr,none": 0.004987677418438801,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6030201682375596,
						"acc_stderr,none": 0.004925831876678817,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.49549019607843137,
						"acc_stderr,none": 0.004950779022493166,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.31303129745086594,
						"acc_stderr,none": 0.046447592598135676,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.000254245212060339,
						"bleu_diff,none": -7.5502972003150015,
						"bleu_diff_stderr,none": 0.6251384286071874,
						"bleu_max,none": 23.937877059675927,
						"bleu_max_stderr,none": 0.5556507830600694,
						"rouge1_acc,none": 0.27539779681762544,
						"rouge1_acc_stderr,none": 0.00024455128716375303,
						"rouge1_diff,none": -9.951319250584817,
						"rouge1_diff_stderr,none": 0.7587933284946611,
						"rouge1_max,none": 49.46546723267731,
						"rouge1_max_stderr,none": 0.7162540556382557,
						"rouge2_acc,none": 0.22031823745410037,
						"rouge2_acc_stderr,none": 0.00021051239178825868,
						"rouge2_diff,none": -11.920053942587268,
						"rouge2_diff_stderr,none": 1.078298560970079,
						"rouge2_max,none": 32.350539226572835,
						"rouge2_max_stderr,none": 0.9420036522288837,
						"rougeL_acc,none": 0.24724602203182375,
						"rougeL_acc_stderr,none": 0.00022808263066331178,
						"rougeL_diff,none": -10.329155354269277,
						"rougeL_diff_stderr,none": 0.7715405811679985,
						"rougeL_max,none": 46.412207003176725,
						"rougeL_max_stderr,none": 0.730402657587926
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236614,
						"bleu_diff,none": -7.5502972003150015,
						"bleu_diff_stderr,none": 0.7906569601332726,
						"bleu_max,none": 23.937877059675927,
						"bleu_max_stderr,none": 0.7454198703147572,
						"rouge1_acc,none": 0.27539779681762544,
						"rouge1_acc_stderr,none": 0.015638135667775523,
						"rouge1_diff,none": -9.951319250584817,
						"rouge1_diff_stderr,none": 0.8710874402117511,
						"rouge1_max,none": 49.46546723267731,
						"rouge1_max_stderr,none": 0.8463179400427807,
						"rouge2_acc,none": 0.22031823745410037,
						"rouge2_acc_stderr,none": 0.014509045171487291,
						"rouge2_diff,none": -11.920053942587268,
						"rouge2_diff_stderr,none": 1.038411556643164,
						"rouge2_max,none": 32.350539226572835,
						"rouge2_max_stderr,none": 0.9705687261749596,
						"rougeL_acc,none": 0.24724602203182375,
						"rougeL_acc_stderr,none": 0.01510240479735965,
						"rougeL_diff,none": -10.329155354269277,
						"rougeL_diff_stderr,none": 0.8783738276884157,
						"rougeL_max,none": 46.412207003176725,
						"rougeL_max_stderr,none": 0.8546359795772268
					},
					"truthfulqa_mc1": {
						"acc,none": 0.21909424724602203,
						"acc_stderr,none": 0.014480038578757447,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3599998225532879,
						"acc_stderr,none": 0.013739223591594373,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.018208661417322834,
						"exact_match_stderr,none": 0.0029668367280168864
					},
					"wic": {
						"acc,none": 0.5360501567398119,
						"acc_stderr,none": 0.01975916162518925,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6682266692073346,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5891184551193365,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.903460406155345,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6148382004735596,
						"acc_stderr,none": 0.013676821287521429,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.39436619718309857,
						"acc_stderr,none": 0.058412510854444266,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7619047619047619,
						"acc_stderr,none": 0.025825054502221032,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5405454545454547,
						"acc_stderr,none": 0.029688555203892242,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502843,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289666,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618556,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3997054886211513,
						"acc_stderr,none": 0.05058781670580755,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463628,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.41004016064257026,
						"acc_stderr,none": 0.009858525713807858,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.44819277108433736,
						"acc_stderr,none": 0.00996812942690988,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3742971887550201,
						"acc_stderr,none": 0.009700182103576732,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.542570281124498,
						"acc_stderr,none": 0.009985682220227443,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4465863453815261,
						"acc_stderr,none": 0.009964722457358764,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.46265060240963857,
						"acc_stderr,none": 0.009994072620561411,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3634538152610442,
						"acc_stderr,none": 0.009641111987257547,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583405,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3542168674698795,
						"acc_stderr,none": 0.009586620142951844,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3859437751004016,
						"acc_stderr,none": 0.009757838842063325,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3855421686746988,
						"acc_stderr,none": 0.009755949341224318,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358228,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.38795180722891565,
						"acc_stderr,none": 0.009767181346586388,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3401606425702811,
						"acc_stderr,none": 0.009496174608136402,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5417243246495398,
						"acc_stderr,none": 0.05362261906116568,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5056254136333554,
						"acc_stderr,none": 0.01286631092307251,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7021839841164792,
						"acc_stderr,none": 0.01176822629134189,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.01261268831876706,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5241561879549967,
						"acc_stderr,none": 0.012852100057309615,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5162144275314361,
						"acc_stderr,none": 0.012860357805055855,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5221707478491066,
						"acc_stderr,none": 0.012854469625936085,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335896,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5340833884844474,
						"acc_stderr,none": 0.012837195610619434,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.01286610802121821,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5354070152217075,
						"acc_stderr,none": 0.012834822852860037,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.012829804720321693,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7091481231737469,
						"acc_stderr,none": 0.0723679949993312,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8163440860215053,
						"acc_stderr,none": 0.008031950484676923,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6506024096385542,
						"acc_stderr,none": 0.052651513564404694,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.556830031282586,
						"acc_stderr,none": 0.016049582215584283,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6311787072243346,
						"acc_stderr,none": 0.029808046634490215,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5904761904761905,
						"acc_stderr,none": 0.027750828240174344,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.628968253968254,
						"acc_stderr,none": 0.02153951426767635,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "EleutherAI/pythia-6.9b"
	},
	"RWKV/rwkv-4-world-1b5": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.4808342728297632,
						"acc_norm,none": 0.44673055242390086,
						"acc_norm_stderr,none": 0.033412806201487226,
						"acc_stderr,none": 0.04987258870642081,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.02042049510113634,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.038,
						"acc_stderr,none": 0.038029216765005085,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8385970149253731,
						"acc_stderr,none": 0.14801113183867534,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10765310032289531,
						"acc_stderr,none": 0.10765310032289531,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25306510101882235,
						"acc_norm,none": 0.25306510101882235,
						"acc_norm_stderr,none": 0.03499944607117969,
						"acc_stderr,none": 0.03499944607117969,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.361396839594514,
						"likelihood_diff_stderr,none": 0.4872343588064916,
						"pct_stereotype,none": 0.5578413834227789,
						"pct_stereotype_stderr,none": 0.07984053245823468
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.011811023622047244,
						"exact_match_stderr,none": 0.0023972250639872506
					},
					"glue": {
						"acc,none": 0.4610788490020706,
						"acc_stderr,none": 0.0554467594315232,
						"alias": "glue",
						"f1,none": 0.5046331229123735,
						"f1_stderr,none": 0.000667253235341036,
						"mcc,none": 0.03258848578270236,
						"mcc_stderr,none": 0.0011437926431812062
					},
					"kmmlu": {
						"acc,none": 0.09717008374241987,
						"acc_norm,none": 0.09717008374241987,
						"acc_norm_stderr,none": 0.0655657065822655,
						"acc_stderr,none": 0.0655657065822655,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.49090111817583865,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04610883621932799,
						"alias": "kobest",
						"f1,none": 0.3915989537193584,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5602561614593441,
						"acc_stderr,none": 0.021511261110812505,
						"alias": "lambada",
						"perplexity,none": 7.6510207311571214,
						"perplexity_stderr,none": 0.7787707961725762
					},
					"lambada_cloze": {
						"acc,none": 0.01571899864156802,
						"acc_stderr,none": 0.0058867052760591745,
						"alias": "lambada_cloze",
						"perplexity,none": 676.6021431590541,
						"perplexity_stderr,none": 70.78269890804017
					},
					"lambada_multilingual": {
						"acc,none": 0.38482437415098003,
						"acc_stderr,none": 0.0633403976700295,
						"alias": "lambada_multilingual",
						"perplexity,none": 72.53961763566299,
						"perplexity_stderr,none": 22.176978026270515
					},
					"mmlu": {
						"acc,none": 0.25032046716991885,
						"acc_stderr,none": 0.04027843155617069,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24272051009564294,
						"acc_stderr,none": 0.0275756742589877,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2780817508850981,
						"acc_stderr,none": 0.040202166523326935,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24406889827754305,
						"acc_stderr,none": 0.034902212268621446,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2404059625753251,
						"acc_stderr,none": 0.05289275274815807,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.28644428672817596,
						"acc_norm,none": 0.25994633802960593,
						"acc_norm_stderr,none": 0.00011957776310244825,
						"acc_stderr,none": 0.08330426692719474,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5369285714285714,
						"acc_stderr,none": 0.013536653575600844,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7142431553991406,
						"acc_norm,none": 0.45478932781259745,
						"acc_norm_stderr,none": 0.003881017061448089,
						"acc_stderr,none": 0.14877406948199687,
						"alias": "pythia",
						"bits_per_byte,none": 0.7637339753552164,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978793848608749,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.1474198581387185,
						"perplexity_stderr,none": 0.15046766334689085,
						"word_perplexity,none": 16.959594076350623,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.28900709219858156,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.05356214311840289,
						"acc_stderr,none": 0.034528285726846755,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.4980533093740641,
						"acc_stderr,none": 0.0050832653243415545,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3056277590681946,
						"acc_stderr,none": 0.045290383112689264,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.0208078335373317,
						"bleu_acc_stderr,none": 2.4969200491193862e-05,
						"bleu_diff,none": 0.0008253782641055972,
						"bleu_diff_stderr,none": 1.1262767938605328e-05,
						"bleu_max,none": 0.025113949832601092,
						"bleu_max_stderr,none": 1.8615258494472413e-05,
						"rouge1_acc,none": 0.0208078335373317,
						"rouge1_acc_stderr,none": 2.4969200491193733e-05,
						"rouge1_diff,none": -0.006365325137735726,
						"rouge1_diff_stderr,none": 0.0001700957251918172,
						"rouge1_max,none": 0.13394395330398728,
						"rouge1_max_stderr,none": 0.002079138328127845,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.022031823745410038,
						"rougeL_acc_stderr,none": 2.640492951943825e-05,
						"rougeL_diff,none": 0.001958768850139991,
						"rougeL_diff_stderr,none": 0.00010097143017710256,
						"rougeL_max,none": 0.12356928454180827,
						"rougeL_max_stderr,none": 0.0020212436979291475
					},
					"xcopa": {
						"acc,none": 0.5539999999999999,
						"acc_stderr,none": 0.03940564852000849,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3933868808567604,
						"acc_stderr,none": 0.042467021925206294,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.559593285602551,
						"acc_stderr,none": 0.04909571529908462,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6772308383906496,
						"acc_stderr,none": 0.05922927253493657,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.4808342728297632,
						"acc_norm,none": 0.44673055242390086,
						"acc_norm_stderr,none": 0.033412806201487226,
						"acc_stderr,none": 0.04987258870642081,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.02042049510113634,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.307,
						"acc_stderr,none": 0.014593284892852621,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456732,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36083333333333334,
						"acc_stderr,none": 0.013869180252444865,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2713310580204778,
						"acc_norm,none": 0.31143344709897613,
						"acc_norm_stderr,none": 0.013532472099850942,
						"acc_stderr,none": 0.012993807727545794,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.5841750841750841,
						"acc_norm,none": 0.5134680134680135,
						"acc_norm_stderr,none": 0.01025606085484075,
						"acc_stderr,none": 0.010113348244647869,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.038,
						"acc_stderr,none": 0.038029216765005085,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0125,
						"acc_stderr,none": 0.0024849471787626713,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0695,
						"acc_stderr,none": 0.005687798389997829,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1855,
						"acc_stderr,none": 0.008693829210029837,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.081,
						"acc_stderr,none": 0.006102304405675846,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.011,
						"acc_stderr,none": 0.0023328568559933755,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0175,
						"acc_stderr,none": 0.00293277608892907,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275286,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000148,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8385970149253731,
						"acc_stderr,none": 0.14801113183867534,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.00244335219932982,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469343,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074125,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248118,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737247,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968526,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.01284137457209693,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045052,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426122,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099207,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.00597215762238965,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269193,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286431,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078153,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752504,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183601,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796594,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696846,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.342,
						"acc_stderr,none": 0.01500870618212173,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024064,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707361,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950424,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098689,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259207,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817138,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796393,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.015743152379585533,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904633,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.015768596914394382,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.0156103389675778,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.015149042659306628,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333337,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996695,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491113,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271294,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275286,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437707,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408044,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575018,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.01581879370351089,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315143,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565894,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598116,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.01426700906103131,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787728,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491158,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364464,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504387,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280313,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656798,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275287,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.0158161357527732,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.399,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5712538226299694,
						"acc_stderr,none": 0.008655800332760226,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.0646095738380922,
						"alias": "cb",
						"f1,none": 0.31203703703703706,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10765310032289531,
						"acc_stderr,none": 0.10765310032289531,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25306510101882235,
						"acc_norm,none": 0.25306510101882235,
						"acc_norm_stderr,none": 0.03499944607117969,
						"acc_stderr,none": 0.03499944607117969,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018759,
						"acc_stderr,none": 0.03489370652018759,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.029975990636702532,
						"acc_stderr,none": 0.029975990636702532,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2476780185758514,
						"acc_norm,none": 0.2476780185758514,
						"acc_norm_stderr,none": 0.024055681892974835,
						"acc_stderr,none": 0.024055681892974835,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02582505450222104,
						"acc_stderr,none": 0.02582505450222104,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693257,
						"acc_stderr,none": 0.030778554678693257,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932032,
						"acc_stderr,none": 0.030532892233932032,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998164,
						"acc_stderr,none": 0.028942004040998164,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.02932276422894952,
						"acc_stderr,none": 0.02932276422894952,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.036079930330813775,
						"acc_stderr,none": 0.036079930330813775,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079126,
						"acc_stderr,none": 0.03344352850079126,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.021470991853398305,
						"acc_stderr,none": 0.021470991853398305,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.02957653529316448,
						"acc_stderr,none": 0.02957653529316448,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.02966137041396583,
						"acc_stderr,none": 0.02966137041396583,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495736,
						"acc_stderr,none": 0.040832215386495736,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.022439412582786405,
						"acc_stderr,none": 0.022439412582786405,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2629310344827586,
						"acc_norm,none": 0.2629310344827586,
						"acc_norm_stderr,none": 0.028964697544540164,
						"acc_stderr,none": 0.028964697544540164,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2360248447204969,
						"acc_norm,none": 0.2360248447204969,
						"acc_norm_stderr,none": 0.03357055232967969,
						"acc_stderr,none": 0.03357055232967969,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.03258848578270236,
						"mcc_stderr,none": 0.03382000359522758
					},
					"copa": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.04163331998932261,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.361396839594514,
						"likelihood_diff_stderr,none": 0.4872343588064916,
						"pct_stereotype,none": 0.5578413834227789,
						"pct_stereotype_stderr,none": 0.07984053245823468
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.385584376863447,
						"likelihood_diff_stderr,none": 0.0876066875196462,
						"pct_stereotype,none": 0.6058437686344663,
						"pct_stereotype_stderr,none": 0.011936514060829238
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.59478021978022,
						"likelihood_diff_stderr,none": 0.3607135185192526,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831278
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.4772727272727275,
						"likelihood_diff_stderr,none": 2.033093965949475,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.230769230769231,
						"likelihood_diff_stderr,none": 0.6214153861151104,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.368359375,
						"likelihood_diff_stderr,none": 0.16949545129621751,
						"pct_stereotype,none": 0.59375,
						"pct_stereotype_stderr,none": 0.0274981297454651
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3211805555555554,
						"likelihood_diff_stderr,none": 0.23206048052760544,
						"pct_stereotype,none": 0.5370370370370371,
						"pct_stereotype_stderr,none": 0.03400603625538272
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.4322916666666665,
						"likelihood_diff_stderr,none": 0.3181217486482622,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.1493602362204722,
						"likelihood_diff_stderr,none": 0.14529682630633645,
						"pct_stereotype,none": 0.5295275590551181,
						"pct_stereotype_stderr,none": 0.022167024359332235
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.391891891891892,
						"likelihood_diff_stderr,none": 0.39010818263751607,
						"pct_stereotype,none": 0.6576576576576577,
						"pct_stereotype_stderr,none": 0.04524117824423199
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.133064516129032,
						"likelihood_diff_stderr,none": 0.4490409370668373,
						"pct_stereotype,none": 0.8279569892473119,
						"pct_stereotype_stderr,none": 0.039348528120618634
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.128289473684211,
						"likelihood_diff_stderr,none": 0.24837685663013503,
						"pct_stereotype,none": 0.6473684210526316,
						"pct_stereotype_stderr,none": 0.034754052595820976
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3389982110912344,
						"likelihood_diff_stderr,none": 0.08133969899891952,
						"pct_stereotype,none": 0.5098389982110912,
						"pct_stereotype_stderr,none": 0.012210934351196742
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3333333333333335,
						"likelihood_diff_stderr,none": 0.29820127640310234,
						"pct_stereotype,none": 0.4666666666666667,
						"pct_stereotype_stderr,none": 0.05288198530254015
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.980769230769231,
						"likelihood_diff_stderr,none": 0.808836186338965,
						"pct_stereotype,none": 0.3076923076923077,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.856060606060606,
						"likelihood_diff_stderr,none": 0.43636626682708834,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.0584705346204686
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0225856697819315,
						"likelihood_diff_stderr,none": 0.1612741325578971,
						"pct_stereotype,none": 0.5202492211838006,
						"pct_stereotype_stderr,none": 0.027927918885132314
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.555830039525692,
						"likelihood_diff_stderr,none": 0.21150566182832983,
						"pct_stereotype,none": 0.3794466403162055,
						"pct_stereotype_stderr,none": 0.030567832939072927
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6458333333333335,
						"likelihood_diff_stderr,none": 0.5025366016458306,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05897165471491952
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0304347826086957,
						"likelihood_diff_stderr,none": 0.15441160143948135,
						"pct_stereotype,none": 0.4217391304347826,
						"pct_stereotype_stderr,none": 0.023050349185909667
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.1684782608695654,
						"likelihood_diff_stderr,none": 0.28472608629667634,
						"pct_stereotype,none": 0.7478260869565218,
						"pct_stereotype_stderr,none": 0.04067222754154717
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.681318681318681,
						"likelihood_diff_stderr,none": 0.3472334391511604,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5931122448979593,
						"likelihood_diff_stderr,none": 0.26887265137324506,
						"pct_stereotype,none": 0.5561224489795918,
						"pct_stereotype_stderr,none": 0.0355794719495366
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.011811023622047244,
						"exact_match_stderr,none": 0.0023972250639872506
					},
					"glue": {
						"acc,none": 0.4610788490020706,
						"acc_stderr,none": 0.0554467594315232,
						"alias": "glue",
						"f1,none": 0.5046331229123735,
						"f1_stderr,none": 0.000667253235341036,
						"mcc,none": 0.03258848578270236,
						"mcc_stderr,none": 0.0011437926431812062
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"hellaswag": {
						"acc,none": 0.400318661621191,
						"acc_norm,none": 0.5164309898426608,
						"acc_norm_stderr,none": 0.004987086426968589,
						"acc_stderr,none": 0.004889615413144198,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09717008374241987,
						"acc_norm,none": 0.09717008374241987,
						"acc_norm_stderr,none": 0.0655657065822655,
						"acc_stderr,none": 0.0655657065822655,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.008963053962592081,
						"acc_stderr,none": 0.008963053962592081,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333378,
						"acc_stderr,none": 0.008333333333333378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370098,
						"acc_stderr,none": 0.012486268734370098,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707352,
						"acc_stderr,none": 0.012559527926707352,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17833333333333334,
						"acc_norm,none": 0.17833333333333334,
						"acc_norm_stderr,none": 0.015640501955765617,
						"acc_stderr,none": 0.015640501955765617,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.007,
						"acc_norm,none": 0.007,
						"acc_norm_stderr,none": 0.0026377941462437785,
						"acc_stderr,none": 0.0026377941462437785,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.002,
						"acc_norm,none": 0.002,
						"acc_norm_stderr,none": 0.0014135055705578176,
						"acc_stderr,none": 0.0014135055705578176,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910625,
						"acc_stderr,none": 0.004319451082910625,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452374,
						"acc_stderr,none": 0.005651808820452374,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611468,
						"acc_stderr,none": 0.004206387249611468,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.015,
						"acc_norm,none": 0.015,
						"acc_norm_stderr,none": 0.003845749574502999,
						"acc_stderr,none": 0.003845749574502999,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611491,
						"acc_stderr,none": 0.004206387249611491,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.009899393819724432,
						"acc_stderr,none": 0.009899393819724432,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.081,
						"acc_norm,none": 0.081,
						"acc_norm_stderr,none": 0.008632121032139966,
						"acc_stderr,none": 0.008632121032139966,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323485,
						"acc_stderr,none": 0.008072494358323485,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.0042063872496114615,
						"acc_stderr,none": 0.0042063872496114615,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.028,
						"acc_norm,none": 0.028,
						"acc_norm_stderr,none": 0.005219506034410047,
						"acc_stderr,none": 0.005219506034410047,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.008072494358323494,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412808,
						"acc_stderr,none": 0.012310790208412808,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.014177505755565045,
						"acc_stderr,none": 0.014177505755565045,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024952,
						"acc_stderr,none": 0.009698921026024952,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.00739531545579295,
						"acc_stderr,none": 0.00739531545579295,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.078,
						"acc_norm,none": 0.078,
						"acc_norm_stderr,none": 0.008484573530118583,
						"acc_stderr,none": 0.008484573530118583,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660013,
						"acc_stderr,none": 0.013394902889660013,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426111,
						"acc_stderr,none": 0.006125072776426111,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.010845350230472986,
						"acc_stderr,none": 0.010845350230472986,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475279,
						"acc_stderr,none": 0.011358918303475279,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910608,
						"acc_stderr,none": 0.004319451082910608,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.49090111817583865,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04610883621932799,
						"alias": "kobest",
						"f1,none": 0.3915989537193584,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.015780495050030156,
						"alias": " - kobest_copa",
						"f1,none": 0.5346086058375092,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.346,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.022210326363977417,
						"acc_stderr,none": 0.021294951277234637,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.34257283605937583,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5314861460957179,
						"acc_stderr,none": 0.025076077305681316,
						"alias": " - kobest_sentineg",
						"f1,none": 0.49767346938775514,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5602561614593441,
						"acc_stderr,none": 0.021511261110812505,
						"alias": "lambada",
						"perplexity,none": 7.6510207311571214,
						"perplexity_stderr,none": 0.7787707961725762
					},
					"lambada_cloze": {
						"acc,none": 0.01571899864156802,
						"acc_stderr,none": 0.0058867052760591745,
						"alias": "lambada_cloze",
						"perplexity,none": 676.6021431590541,
						"perplexity_stderr,none": 70.78269890804017
					},
					"lambada_multilingual": {
						"acc,none": 0.38482437415098003,
						"acc_stderr,none": 0.0633403976700295,
						"alias": "lambada_multilingual",
						"perplexity,none": 72.53961763566299,
						"perplexity_stderr,none": 22.176978026270515
					},
					"lambada_openai": {
						"acc,none": 0.6006209974771978,
						"acc_stderr,none": 0.006823464591736833,
						"alias": " - lambada_openai",
						"perplexity,none": 6.1474198581387185,
						"perplexity_stderr,none": 0.15046766334689085
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.00446341936735882,
						"acc_stderr,none": 0.0009286980441682211,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 809.2848101778802,
						"perplexity_stderr,none": 30.182673706796486
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3079759363477586,
						"acc_stderr,none": 0.006431778256505183,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 96.73887302349743,
						"perplexity_stderr,none": 5.8392702313842735
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6012031826120706,
						"acc_stderr,none": 0.006821793205930761,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.1459615737496325,
						"perplexity_stderr,none": 0.15039979139037377
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.31729089850572484,
						"acc_stderr,none": 0.006484234706911058,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 93.8867828053737,
						"perplexity_stderr,none": 5.2692732356580825
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.37065786920240634,
						"acc_stderr,none": 0.006728869231430023,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 61.452182144663574,
						"perplexity_stderr,none": 3.4711396710082694
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.32699398408693964,
						"acc_stderr,none": 0.006535689740487132,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 104.47428863103063,
						"perplexity_stderr,none": 6.306228990487898
					},
					"lambada_standard": {
						"acc,none": 0.5195032020182418,
						"acc_stderr,none": 0.006960676273955534,
						"alias": " - lambada_standard",
						"perplexity,none": 9.155029929555699,
						"perplexity_stderr,none": 0.24333039800956996
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.026974577915777218,
						"acc_stderr,none": 0.0022571036096265327,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 543.9194761402281,
						"perplexity_stderr,none": 17.501600465528313
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2455470737913486,
						"exact_match_stderr,get-answer": 0.010859138259206537
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.016887410894296944,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23982188295165394,
						"acc_norm,none": 0.2589058524173028,
						"acc_norm_stderr,none": 0.011051456868610528,
						"acc_stderr,none": 0.010772437759520099,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23953098827470687,
						"acc_norm,none": 0.24623115577889448,
						"acc_norm_stderr,none": 0.007886624866001841,
						"acc_stderr,none": 0.007813078802813294,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5557085363270493,
						"acc_stderr,none": 0.005113856295728005,
						"alias": "mc_taco",
						"f1,none": 0.40589151678232543,
						"f1_stderr,none": 0.007373093525028702
					},
					"medmcqa": {
						"acc,none": 0.27229261295720775,
						"acc_norm,none": 0.27229261295720775,
						"acc_norm_stderr,none": 0.006883420600931438,
						"acc_stderr,none": 0.006883420600931438,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.23723487824037706,
						"acc_norm,none": 0.23723487824037706,
						"acc_norm_stderr,none": 0.011927272102238139,
						"acc_stderr,none": 0.011927272102238139,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25032046716991885,
						"acc_stderr,none": 0.04027843155617069,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.1925925925925926,
						"acc_stderr,none": 0.03406542058502652,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.03317672787533157,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.27547169811320754,
						"acc_stderr,none": 0.027495663683724064,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2152777777777778,
						"acc_stderr,none": 0.03437079344106135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.038612291966536934,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.045338381959297736,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.030976692998534443,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.039994238792813365,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03724563619774632,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23544973544973544,
						"acc_stderr,none": 0.021851509822031708,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.040735243221471255,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.23870967741935484,
						"acc_stderr,none": 0.024251071262208837,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.22167487684729065,
						"acc_stderr,none": 0.029225575892489614,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.031234752377721175,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.21212121212121213,
						"acc_stderr,none": 0.029126522834586846,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.029778663037752947,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.021444547301560483,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.027195934804085622,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.026265024608275882,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.19205298013245034,
						"acc_stderr,none": 0.032162984205936156,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24770642201834864,
						"acc_stderr,none": 0.018508143602547832,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16203703703703703,
						"acc_stderr,none": 0.02513045365226846,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.029771775228145628,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.24472573839662448,
						"acc_stderr,none": 0.027985699387036416,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.37668161434977576,
						"acc_stderr,none": 0.03252113489929187,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847834,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24272051009564294,
						"acc_stderr,none": 0.0275756742589877,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.043733130409147614,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2392638036809816,
						"acc_stderr,none": 0.03351953879521269,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.04246624336697626,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.27350427350427353,
						"acc_stderr,none": 0.029202540153431187,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.26947637292464877,
						"acc_stderr,none": 0.015866243073215058,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.21965317919075145,
						"acc_stderr,none": 0.022289638852617897,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.024848018263875192,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2780817508850981,
						"acc_stderr,none": 0.040202166523326935,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.24437299035369775,
						"acc_stderr,none": 0.024406162094668903,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.02409347123262133,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343957,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24511082138200782,
						"acc_stderr,none": 0.010986307870045533,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.2536764705882353,
						"acc_stderr,none": 0.026431329870789548,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25326797385620914,
						"acc_stderr,none": 0.017593486895366835,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.33636363636363636,
						"acc_stderr,none": 0.04525393596302506,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.23673469387755103,
						"acc_stderr,none": 0.027212835884073167,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24406889827754305,
						"acc_stderr,none": 0.034902212268621446,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.263681592039801,
						"acc_stderr,none": 0.031157150869355558,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2404059625753251,
						"acc_stderr,none": 0.05289275274815807,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768081,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2891566265060241,
						"acc_stderr,none": 0.03529486801511115,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.21637426900584794,
						"acc_stderr,none": 0.03158149539338734,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.39266428935303105,
						"acc_stderr,none": 0.004929491082595716,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.39157851912123676,
						"acc_stderr,none": 0.004922807472681484,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6323529411764706,
						"acc_stderr,none": 0.02390001176903565,
						"alias": "mrpc",
						"f1,none": 0.7611464968152867,
						"f1_stderr,none": 0.01893630252714745
					},
					"multimedqa": {
						"acc,none": 0.28644428672817596,
						"acc_norm,none": 0.25994633802960593,
						"acc_norm_stderr,none": 0.00011957776310244825,
						"acc_stderr,none": 0.08330426692719474,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6654439445744788,
						"mrr_stderr,none": 0.010322328489342862,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.44130925507900676,
						"r@2_stderr,none": 0.016691125435903995
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6156884895692292,
						"mrr_stderr,none": 0.010268835127900085,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47742663656884876,
						"r@2_stderr,none": 0.016790178837117326
					},
					"openbookqa": {
						"acc,none": 0.226,
						"acc_norm,none": 0.33,
						"acc_norm_stderr,none": 0.021049612166134806,
						"acc_stderr,none": 0.018722956449139926,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.5285,
						"acc_stderr,none": 0.011164954236428808,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5215,
						"acc_stderr,none": 0.011172792428275121,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.5265,
						"acc_stderr,none": 0.011167418260963935,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.011128198119942883,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078671,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957058,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5369285714285714,
						"acc_stderr,none": 0.013536653575600844,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7149075081610446,
						"acc_norm,none": 0.7067464635473341,
						"acc_norm_stderr,none": 0.010621818421101924,
						"acc_stderr,none": 0.010533270588738935,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2686272416737831,
						"acc_norm,none": 0.27444491887275835,
						"acc_norm_stderr,none": 0.003260137689067267,
						"acc_stderr,none": 0.0032383036370811106,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7142431553991406,
						"acc_norm,none": 0.45478932781259745,
						"acc_norm_stderr,none": 0.003881017061448089,
						"acc_stderr,none": 0.14877406948199687,
						"alias": "pythia",
						"bits_per_byte,none": 0.7637339753552164,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978793848608749,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.1474198581387185,
						"perplexity_stderr,none": 0.15046766334689085,
						"word_perplexity,none": 16.959594076350623,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.28900709219858156,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.05356214311840289,
						"acc_stderr,none": 0.034528285726846755,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.43333333333333335,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.041666666666666616,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.0365545115043377,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.31338028169014087,
						"acc_norm,none": 0.30633802816901406,
						"acc_norm_stderr,none": 0.027401931831161554,
						"acc_stderr,none": 0.027574062217983555,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4953322350356947,
						"acc_stderr,none": 0.006765115735419827,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4787534009398961,
						"acc_stderr,none": 0.002484454534596091,
						"alias": "qqp",
						"f1,none": 0.501937984496124,
						"f1_stderr,none": 0.0029703091933685364
					},
					"race": {
						"acc,none": 0.32727272727272727,
						"acc_stderr,none": 0.014521924541567924,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5703971119133574,
						"acc_stderr,none": 0.02979666882912467,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.847,
						"acc_norm,none": 0.797,
						"acc_norm_stderr,none": 0.012726073744598257,
						"acc_stderr,none": 0.011389500459665547,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5631768953068592,
						"acc_stderr,none": 0.029855247390314945,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8279816513761468,
						"acc_stderr,none": 0.012787588897266161,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5268919324202739,
						"acc_norm,none": 0.7143856842947116,
						"acc_norm_stderr,none": 0.0031936482384900926,
						"acc_stderr,none": 0.003529975356433948,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.4980533093740641,
						"acc_stderr,none": 0.0050832653243415545,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.49849759615384615,
						"acc_stderr,none": 0.005004232835002921,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.49559136515658253,
						"acc_stderr,none": 0.005033644799289787,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.004950980415950501,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3056277590681946,
						"acc_stderr,none": 0.045290383112689264,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.0208078335373317,
						"bleu_acc_stderr,none": 2.4969200491193862e-05,
						"bleu_diff,none": 0.0008253782641055972,
						"bleu_diff_stderr,none": 1.1262767938605328e-05,
						"bleu_max,none": 0.025113949832601092,
						"bleu_max_stderr,none": 1.8615258494472413e-05,
						"rouge1_acc,none": 0.0208078335373317,
						"rouge1_acc_stderr,none": 2.4969200491193733e-05,
						"rouge1_diff,none": -0.006365325137735726,
						"rouge1_diff_stderr,none": 0.0001700957251918172,
						"rouge1_max,none": 0.13394395330398728,
						"rouge1_max_stderr,none": 0.002079138328127845,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.022031823745410038,
						"rougeL_acc_stderr,none": 2.640492951943825e-05,
						"rougeL_diff,none": 0.001958768850139991,
						"rougeL_diff_stderr,none": 0.00010097143017710256,
						"rougeL_max,none": 0.12356928454180827,
						"rougeL_max_stderr,none": 0.0020212436979291475
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.0208078335373317,
						"bleu_acc_stderr,none": 0.004996919099924859,
						"bleu_diff,none": 0.0008253782641055972,
						"bleu_diff_stderr,none": 0.0033560047584300784,
						"bleu_max,none": 0.025113949832601092,
						"bleu_max_stderr,none": 0.004314540357265466,
						"rouge1_acc,none": 0.0208078335373317,
						"rouge1_acc_stderr,none": 0.004996919099924846,
						"rouge1_diff,none": -0.006365325137735726,
						"rouge1_diff_stderr,none": 0.013042075187324186,
						"rouge1_max,none": 0.13394395330398728,
						"rouge1_max_stderr,none": 0.04559756932258391,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.022031823745410038,
						"rougeL_acc_stderr,none": 0.005138572712284828,
						"rougeL_diff,none": 0.001958768850139991,
						"rougeL_diff_stderr,none": 0.01004845411877382,
						"rougeL_max,none": 0.12356928454180827,
						"rougeL_max_stderr,none": 0.044958243937337536
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2141982864137087,
						"acc_stderr,none": 0.01436214815569046,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3512563882179406,
						"acc_stderr,none": 0.01364609178740015,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.011811023622047244,
						"exact_match_stderr,none": 0.0023972250639872506
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7637339753552164,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978793848608749,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 16.959594076350623,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5516969218626677,
						"acc_stderr,none": 0.01397717130712634,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4326923076923077,
						"acc_stderr,none": 0.04881803687006195,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.6996336996336996,
						"acc_stderr,none": 0.027795629283121376,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5539999999999999,
						"acc_stderr,none": 0.03940564852000849,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044292,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.021793529219281165,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.022242244375731017,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.021948929609938612,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.021854684955611263,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3933868808567604,
						"acc_stderr,none": 0.042467021925206294,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463623,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4004016064257028,
						"acc_stderr,none": 0.00982122560976308,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811681,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3610441767068273,
						"acc_stderr,none": 0.00962726974219572,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.4979919678714859,
						"acc_stderr,none": 0.010021992045038413,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.44136546184738956,
						"acc_stderr,none": 0.009952922349377748,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894261,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3706827309236948,
						"acc_stderr,none": 0.009681074302261282,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42329317269076305,
						"acc_stderr,none": 0.009903432138272912,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667057,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721318,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.36947791164658633,
						"acc_stderr,none": 0.009674576085776447,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3514056224899598,
						"acc_stderr,none": 0.009569263079823967,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.009819585875881305,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.00952295446980604,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.559593285602551,
						"acc_stderr,none": 0.04909571529908462,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5082726671078756,
						"acc_stderr,none": 0.012865364020375395,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6929185969556585,
						"acc_stderr,none": 0.011870783739438444,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6055592322964924,
						"acc_stderr,none": 0.012577106513936133,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.528127068166777,
						"acc_stderr,none": 0.012846749995797695,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5367306419589676,
						"acc_stderr,none": 0.012832359240206969,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.585043017868961,
						"acc_stderr,none": 0.012679641217262479,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4956982131039047,
						"acc_stderr,none": 0.012866649085718848,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.012743443034698407,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.012862387586650079,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5400397088021178,
						"acc_stderr,none": 0.012825802370083987,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5804103242885507,
						"acc_stderr,none": 0.012699642268200756,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6772308383906496,
						"acc_stderr,none": 0.05922927253493657,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.7634408602150538,
						"acc_stderr,none": 0.008815348871044423,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6385542168674698,
						"acc_stderr,none": 0.053053439348320096,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5495307612095933,
						"acc_stderr,none": 0.01607480892375643,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6463878326996197,
						"acc_stderr,none": 0.029536534656802057,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5714285714285714,
						"acc_stderr,none": 0.02792722339076032,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.02173646243481744,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {},
				"results": {
					"truthfulqa_mc2": {
						"acc,none": "NaN",
						"acc_stderr,none": "NaN",
						"alias": "truthfulqa_mc2"
					}
				}
			}
		},
		"name": "RWKV/rwkv-4-world-1b5"
	},
	"RWKV/rwkv-4-world-3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5284667418263811,
						"acc_norm,none": 0.5059188275084555,
						"acc_norm_stderr,none": 0.03744309987127583,
						"acc_stderr,none": 0.04871592125265593,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.341875,
						"acc_stderr,none": 0.014697028808996227,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0109,
						"acc_stderr,none": 0.012720026949248496,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8386417910447761,
						"acc_stderr,none": 0.13709944964920545,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2578008915304606,
						"acc_norm,none": 0.2578008915304606,
						"acc_norm_stderr,none": 0.12471723400851625,
						"acc_stderr,none": 0.12471723400851625,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25271973752374394,
						"acc_norm,none": 0.25271973752374394,
						"acc_norm_stderr,none": 0.0386446306799481,
						"acc_stderr,none": 0.0386446306799481,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3912772063208108,
						"likelihood_diff_stderr,none": 0.45217241522457285,
						"pct_stereotype,none": 0.5629099582587955,
						"pct_stereotype_stderr,none": 0.08194503289161058
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.01624015748031496,
						"exact_match_stderr,none": 0.0028046889385479907
					},
					"glue": {
						"acc,none": 0.5359909327922994,
						"acc_stderr,none": 0.0912549478984433,
						"alias": "glue",
						"f1,none": 0.3062320791999928,
						"f1_stderr,none": 0.0023293639115965024,
						"mcc,none": 0.007054476296006027,
						"mcc_stderr,none": 0.0009723463262385443
					},
					"kmmlu": {
						"acc,none": 0.2950043315044758,
						"acc_norm,none": 0.2950043315044758,
						"acc_norm_stderr,none": 0.03006470095610117,
						"acc_stderr,none": 0.03006470095610117,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5003288752466565,
						"acc_norm,none": 0.448,
						"acc_norm_stderr,none": 0.0004955831663326678,
						"acc_stderr,none": 0.044495448642276544,
						"alias": "kobest",
						"f1,none": 0.40373553734304096,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6280807296720357,
						"acc_stderr,none": 0.01590021763812435,
						"alias": "lambada",
						"perplexity,none": 5.4995175027345065,
						"perplexity_stderr,none": 0.4154073293845765
					},
					"lambada_cloze": {
						"acc,none": 0.007665437609159713,
						"acc_stderr,none": 0.0012910586004762218,
						"alias": "lambada_cloze",
						"perplexity,none": 910.1699637679841,
						"perplexity_stderr,none": 34.49916941348411
					},
					"lambada_multilingual": {
						"acc,none": 0.43361148845332814,
						"acc_stderr,none": 0.06532621311621811,
						"alias": "lambada_multilingual",
						"perplexity,none": 48.10918047618764,
						"perplexity_stderr,none": 14.310185712534913
					},
					"mmlu": {
						"acc,none": 0.2511038313630537,
						"acc_stderr,none": 0.040348222828877914,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24420828905419767,
						"acc_stderr,none": 0.026694910508076663,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2687479884132604,
						"acc_stderr,none": 0.045108539914000226,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03781927975280289,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2568981921979067,
						"acc_stderr,none": 0.04989991517141305,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2878637331440738,
						"acc_norm,none": 0.2626469598051318,
						"acc_norm_stderr,none": 8.778614059200267e-05,
						"acc_stderr,none": 0.10037047258687279,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.5088571428571429,
						"acc_stderr,none": 0.01892792045987846,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7199914268735376,
						"acc_norm,none": 0.5121286006206975,
						"acc_norm_stderr,none": 0.0038348366896563705,
						"acc_stderr,none": 0.1371852338534413,
						"alias": "pythia",
						"bits_per_byte,none": 0.7220552283260857,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6495302422700988,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.711549267902551,
						"perplexity_stderr,none": 0.10507146167698257,
						"word_perplexity,none": 14.531932778850274,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.30851063829787234,
						"acc_norm,none": 0.38652482269503546,
						"acc_norm_stderr,none": 0.045679080146263956,
						"acc_stderr,none": 0.041197927799664004,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5339589364746598,
						"acc_stderr,none": 0.024148667232371378,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3106305943833439,
						"acc_stderr,none": 0.04757574964937745,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.004895960832313341,
						"bleu_acc_stderr,none": 5.970576470394393e-06,
						"bleu_diff,none": -0.002394734887517943,
						"bleu_diff_stderr,none": 1.8033929274939632e-06,
						"bleu_max,none": 0.0038278834618436517,
						"bleu_max_stderr,none": 1.3195521685956608e-06,
						"rouge1_acc,none": 0.15422276621787026,
						"rouge1_acc_stderr,none": 0.00015985061837987227,
						"rouge1_diff,none": -0.3522557019355229,
						"rouge1_diff_stderr,none": 0.05516215393746433,
						"rouge1_max,none": 2.3298513472007274,
						"rouge1_max_stderr,none": 0.040920608150566036,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.15422276621787026,
						"rougeL_acc_stderr,none": 0.00015985061837987227,
						"rougeL_diff,none": -0.3367269512259081,
						"rougeL_diff_stderr,none": 0.05504343514765663,
						"rougeL_max,none": 2.299166484737046,
						"rougeL_max_stderr,none": 0.040922732641792046
					},
					"xcopa": {
						"acc,none": 0.5754545454545454,
						"acc_stderr,none": 0.04097748639979641,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.40934404283801873,
						"acc_stderr,none": 0.04372654715273308,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5812526322122616,
						"acc_stderr,none": 0.054082979378632996,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7230838390649584,
						"acc_stderr,none": 0.05993268020348456,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5284667418263811,
						"acc_norm,none": 0.5059188275084555,
						"acc_norm_stderr,none": 0.03744309987127583,
						"acc_stderr,none": 0.04871592125265593,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.341875,
						"acc_stderr,none": 0.014697028808996227,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.336,
						"acc_stderr,none": 0.014944140233795018,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408947,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.013728421539454878,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3242320819112628,
						"acc_norm,none": 0.35238907849829354,
						"acc_norm_stderr,none": 0.013960142600598684,
						"acc_stderr,none": 0.01367881039951882,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6292087542087542,
						"acc_norm,none": 0.5816498316498316,
						"acc_norm_stderr,none": 0.010122061470742865,
						"acc_stderr,none": 0.009911292822056918,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0109,
						"acc_stderr,none": 0.012720026949248496,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.013,
						"acc_stderr,none": 0.0025335171905233223,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.0019296986470519835,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.071,
						"acc_stderr,none": 0.005744214306500112,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0125,
						"acc_stderr,none": 0.00248494717876267,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521438,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275319,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339605,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0026030368763557484,
						"acc_stderr,none": 0.001061531641109421,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8386417910447761,
						"acc_stderr,none": 0.13709944964920545,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024968,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469343,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074114,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151117,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598275,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493932,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.014498627873361427,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329814,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689092,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.00669595667816304,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.0057338361396954505,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571401,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697586,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557419,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274698,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274531,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731975,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525068,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597502,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767615,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559929,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.01417451646148524,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416054,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584942,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904636,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706836,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031515,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.669,
						"acc_stderr,none": 0.014888272588203934,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271336,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.015070604603768408,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087966,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031314,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729479,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787735,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244055,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528024,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426113,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.01371813351688893,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.477,
						"acc_stderr,none": 0.015802554246726098,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410051,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244057,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437642,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617331,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.015726771166750357,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504401,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298688997,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695787,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.00961683333969581,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099187,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727188,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163041,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323494,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611465,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.0056518088204523705,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.015780495050030156,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5804281345565749,
						"acc_stderr,none": 0.008631175489166728,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.060914490387317256,
						"alias": "cb",
						"f1,none": 0.20745920745920743,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2578008915304606,
						"acc_norm,none": 0.2578008915304606,
						"acc_norm_stderr,none": 0.12471723400851625,
						"acc_stderr,none": 0.12471723400851625,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.1702127659574468,
						"acc_norm,none": 0.1702127659574468,
						"acc_norm_stderr,none": 0.055411578656325386,
						"acc_stderr,none": 0.055411578656325386,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518174,
						"acc_stderr,none": 0.05443310539518174,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.0795654132101608,
						"acc_stderr,none": 0.0795654132101608,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359515,
						"acc_stderr,none": 0.07824607964359515,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.06007385040937022,
						"acc_stderr,none": 0.06007385040937022,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.06603381797442179,
						"acc_stderr,none": 0.06603381797442179,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936748,
						"acc_stderr,none": 0.07099970268936748,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25271973752374394,
						"acc_norm,none": 0.25271973752374394,
						"acc_norm_stderr,none": 0.0386446306799481,
						"acc_stderr,none": 0.0386446306799481,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139405,
						"acc_stderr,none": 0.03374402644139405,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.030532597427122114,
						"acc_stderr,none": 0.030532597427122114,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.24427480916030533,
						"acc_norm,none": 0.24427480916030533,
						"acc_norm_stderr,none": 0.037683359597287434,
						"acc_stderr,none": 0.037683359597287434,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.038618823893117264,
						"acc_stderr,none": 0.038618823893117264,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.21495327102803738,
						"acc_norm,none": 0.21495327102803738,
						"acc_norm_stderr,none": 0.03989944463395407,
						"acc_stderr,none": 0.03989944463395407,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2693498452012384,
						"acc_norm,none": 0.2693498452012384,
						"acc_norm_stderr,none": 0.024722089230802043,
						"acc_stderr,none": 0.024722089230802043,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083291,
						"acc_stderr,none": 0.03132179803083291,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24581005586592178,
						"acc_norm,none": 0.24581005586592178,
						"acc_norm_stderr,none": 0.032272320235413,
						"acc_stderr,none": 0.032272320235413,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.22784810126582278,
						"acc_norm,none": 0.22784810126582278,
						"acc_norm_stderr,none": 0.027303484599069436,
						"acc_stderr,none": 0.027303484599069436,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.043628399335701,
						"acc_stderr,none": 0.043628399335701,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845101,
						"acc_stderr,none": 0.04117581097845101,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.21245421245421245,
						"acc_norm,none": 0.21245421245421245,
						"acc_norm_stderr,none": 0.024801967135031452,
						"acc_stderr,none": 0.024801967135031452,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604236,
						"acc_stderr,none": 0.030587591351604236,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617515,
						"acc_stderr,none": 0.03814280082617515,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.22302158273381295,
						"acc_norm,none": 0.22302158273381295,
						"acc_norm_stderr,none": 0.035435484995619396,
						"acc_stderr,none": 0.035435484995619396,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.0342292401764445,
						"acc_stderr,none": 0.0342292401764445,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.028017279737180052,
						"acc_stderr,none": 0.028017279737180052,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29292929292929293,
						"acc_norm,none": 0.29292929292929293,
						"acc_norm_stderr,none": 0.03242497958178815,
						"acc_stderr,none": 0.03242497958178815,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.226890756302521,
						"acc_norm,none": 0.226890756302521,
						"acc_norm_stderr,none": 0.027205371538279472,
						"acc_stderr,none": 0.027205371538279472,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.02932276422894952,
						"acc_stderr,none": 0.02932276422894952,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.03321382551635589,
						"acc_stderr,none": 0.03321382551635589,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2684563758389262,
						"acc_norm,none": 0.2684563758389262,
						"acc_norm_stderr,none": 0.03642722753862902,
						"acc_stderr,none": 0.03642722753862902,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2196969696969697,
						"acc_norm,none": 0.2196969696969697,
						"acc_norm_stderr,none": 0.036174957725402315,
						"acc_stderr,none": 0.036174957725402315,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.041100705493392085,
						"acc_stderr,none": 0.041100705493392085,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.036751374389002375,
						"acc_stderr,none": 0.036751374389002375,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933346,
						"acc_stderr,none": 0.032530209055933346,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.20348837209302326,
						"acc_norm,none": 0.20348837209302326,
						"acc_norm_stderr,none": 0.030787030621338977,
						"acc_stderr,none": 0.030787030621338977,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.26635514018691586,
						"acc_norm,none": 0.26635514018691586,
						"acc_norm_stderr,none": 0.030288912386133213,
						"acc_stderr,none": 0.030288912386133213,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888517,
						"acc_stderr,none": 0.03887917804888517,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671805,
						"acc_stderr,none": 0.041118866352671805,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.030231990420749876,
						"acc_stderr,none": 0.030231990420749876,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25555555555555554,
						"acc_norm,none": 0.25555555555555554,
						"acc_norm_stderr,none": 0.032601103040276455,
						"acc_stderr,none": 0.032601103040276455,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.26455026455026454,
						"acc_norm,none": 0.26455026455026454,
						"acc_norm_stderr,none": 0.03217004537697526,
						"acc_stderr,none": 0.03217004537697526,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324101,
						"acc_stderr,none": 0.04126514736324101,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.22758620689655173,
						"acc_norm,none": 0.22758620689655173,
						"acc_norm_stderr,none": 0.03493950380131184,
						"acc_stderr,none": 0.03493950380131184,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.03922322702763679,
						"acc_stderr,none": 0.03922322702763679,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.033133343292217204,
						"acc_stderr,none": 0.033133343292217204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.02973775172659684,
						"acc_stderr,none": 0.02973775172659684,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.021864225665813017,
						"acc_stderr,none": 0.021864225665813017,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.028810173508063863,
						"acc_stderr,none": 0.028810173508063863,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.28160919540229884,
						"acc_norm,none": 0.28160919540229884,
						"acc_norm_stderr,none": 0.03419642820708564,
						"acc_stderr,none": 0.03419642820708564,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24778761061946902,
						"acc_norm,none": 0.24778761061946902,
						"acc_norm_stderr,none": 0.028781854672921457,
						"acc_stderr,none": 0.028781854672921457,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.033175059300091805,
						"acc_stderr,none": 0.033175059300091805,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101964,
						"acc_stderr,none": 0.03334150198101964,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2670807453416149,
						"acc_norm,none": 0.2670807453416149,
						"acc_norm_stderr,none": 0.03497754822823695,
						"acc_stderr,none": 0.03497754822823695,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245474,
						"acc_stderr,none": 0.03244189290245474,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.007054476296006027,
						"mcc_stderr,none": 0.031182468251223224
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3912772063208108,
						"likelihood_diff_stderr,none": 0.45217241522457285,
						"pct_stereotype,none": 0.5629099582587955,
						"pct_stereotype_stderr,none": 0.08194503289161058
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.433139534883721,
						"likelihood_diff_stderr,none": 0.08505497667306373,
						"pct_stereotype,none": 0.6195587358378056,
						"pct_stereotype_stderr,none": 0.011858999298863531
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.802197802197802,
						"likelihood_diff_stderr,none": 0.408556911272371,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.420454545454546,
						"likelihood_diff_stderr,none": 2.1015600757689787,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.15212000482437738
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.380769230769231,
						"likelihood_diff_stderr,none": 0.6078440505531822,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.57734375,
						"likelihood_diff_stderr,none": 0.17274133662443392,
						"pct_stereotype,none": 0.559375,
						"pct_stereotype_stderr,none": 0.027796540761244683
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2708333333333335,
						"likelihood_diff_stderr,none": 0.22124096987854508,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.006944444444445,
						"likelihood_diff_stderr,none": 0.3444884557171887,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.0807086614173227,
						"likelihood_diff_stderr,none": 0.14085882331518929,
						"pct_stereotype,none": 0.5433070866141733,
						"pct_stereotype_stderr,none": 0.022122328731374527
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.494369369369369,
						"likelihood_diff_stderr,none": 0.3571308631118962,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.0427766252488144
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.42741935483871,
						"likelihood_diff_stderr,none": 0.40675741309720803,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.960526315789474,
						"likelihood_diff_stderr,none": 0.22675935481241669,
						"pct_stereotype,none": 0.6631578947368421,
						"pct_stereotype_stderr,none": 0.03437880340748324
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3470483005366725,
						"likelihood_diff_stderr,none": 0.07910152333259726,
						"pct_stereotype,none": 0.507453786523554,
						"pct_stereotype_stderr,none": 0.012211942027483493
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.1444444444444444,
						"likelihood_diff_stderr,none": 0.29491179667487927,
						"pct_stereotype,none": 0.4777777777777778,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.1923076923076925,
						"likelihood_diff_stderr,none": 0.43037126763747774,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.863636363636363,
						"likelihood_diff_stderr,none": 0.45628164922580017,
						"pct_stereotype,none": 0.7424242424242424,
						"pct_stereotype_stderr,none": 0.054240275510565296
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8473520249221185,
						"likelihood_diff_stderr,none": 0.15627983357620062,
						"pct_stereotype,none": 0.5264797507788161,
						"pct_stereotype_stderr,none": 0.02791162519893664
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.5316205533596836,
						"likelihood_diff_stderr,none": 0.1988253681061026,
						"pct_stereotype,none": 0.3675889328063241,
						"pct_stereotype_stderr,none": 0.030372509322709233
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6493055555555554,
						"likelihood_diff_stderr,none": 0.45260666206685046,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.2730978260869565,
						"likelihood_diff_stderr,none": 0.15581675834000067,
						"pct_stereotype,none": 0.38913043478260867,
						"pct_stereotype_stderr,none": 0.022757025753631196
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.2195652173913043,
						"likelihood_diff_stderr,none": 0.31404022152728844,
						"pct_stereotype,none": 0.591304347826087,
						"pct_stereotype_stderr,none": 0.04604188749503789
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.71978021978022,
						"likelihood_diff_stderr,none": 0.3009659653246675,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.04108446855035883
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5251913265306123,
						"likelihood_diff_stderr,none": 0.25373992359219855,
						"pct_stereotype,none": 0.6275510204081632,
						"pct_stereotype_stderr,none": 0.03462107977939841
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.01624015748031496,
						"exact_match_stderr,none": 0.0028046889385479907
					},
					"glue": {
						"acc,none": 0.5359909327922994,
						"acc_stderr,none": 0.0912549478984433,
						"alias": "glue",
						"f1,none": 0.3062320791999928,
						"f1_stderr,none": 0.0023293639115965024,
						"mcc,none": 0.007054476296006027,
						"mcc_stderr,none": 0.0009723463262385443
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"hellaswag": {
						"acc,none": 0.4444333798048198,
						"acc_norm,none": 0.5878311093407688,
						"acc_norm_stderr,none": 0.004912192800263312,
						"acc_stderr,none": 0.004958872288442145,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2950043315044758,
						"acc_norm,none": 0.2950043315044758,
						"acc_norm_stderr,none": 0.03006470095610117,
						"acc_stderr,none": 0.03006470095610117,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.014696631960792505,
						"acc_stderr,none": 0.014696631960792505,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087967,
						"acc_stderr,none": 0.014683991951087967,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008224,
						"acc_stderr,none": 0.014414290540008224,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.2733333333333333,
						"acc_norm,none": 0.2733333333333333,
						"acc_norm_stderr,none": 0.01820960423827394,
						"acc_stderr,none": 0.01820960423827394,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.329,
						"acc_norm,none": 0.329,
						"acc_norm_stderr,none": 0.01486539538592836,
						"acc_stderr,none": 0.01486539538592836,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.01478291360099666,
						"acc_stderr,none": 0.01478291360099666,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.339,
						"acc_norm,none": 0.339,
						"acc_norm_stderr,none": 0.014976758771620344,
						"acc_stderr,none": 0.014976758771620344,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087955,
						"acc_stderr,none": 0.014683991951087955,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.24615384615384617,
						"acc_norm,none": 0.24615384615384617,
						"acc_norm_stderr,none": 0.03792711596479615,
						"acc_stderr,none": 0.03792711596479615,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.01480686473373886,
						"acc_stderr,none": 0.01480686473373886,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087955,
						"acc_stderr,none": 0.014683991951087955,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296184,
						"acc_stderr,none": 0.014341711358296184,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087962,
						"acc_stderr,none": 0.014683991951087962,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434935,
						"acc_stderr,none": 0.014221154708434935,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.01409502286871759,
						"acc_stderr,none": 0.01409502286871759,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934649,
						"acc_stderr,none": 0.014770821817934649,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.014818724459095526,
						"acc_stderr,none": 0.014818724459095526,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509001,
						"acc_stderr,none": 0.014658474370509001,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.014758652303574885,
						"acc_stderr,none": 0.014758652303574885,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220484,
						"acc_stderr,none": 0.014484778521220484,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750638,
						"acc_stderr,none": 0.013626065817750638,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.014606483127342761,
						"acc_stderr,none": 0.014606483127342761,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651136,
						"acc_stderr,none": 0.013736254390651136,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.28833333333333333,
						"acc_norm,none": 0.28833333333333333,
						"acc_norm_stderr,none": 0.018508547058789338,
						"acc_stderr,none": 0.018508547058789338,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920833,
						"acc_stderr,none": 0.013512312258920833,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.01477082181793464,
						"acc_stderr,none": 0.01477082181793464,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134712,
						"acc_stderr,none": 0.014470846741134712,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.01434171135829618,
						"acc_stderr,none": 0.01434171135829618,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.02481518457232592,
						"acc_stderr,none": 0.02481518457232592,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651145,
						"acc_stderr,none": 0.013736254390651145,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.313,
						"acc_norm,none": 0.313,
						"acc_norm_stderr,none": 0.014671272822977885,
						"acc_stderr,none": 0.014671272822977885,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441271,
						"acc_stderr,none": 0.014399942998441271,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291345,
						"acc_stderr,none": 0.014236526215291345,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740668,
						"acc_stderr,none": 0.014142984975740668,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664384,
						"acc_stderr,none": 0.014566646394664384,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5003288752466565,
						"acc_norm,none": 0.448,
						"acc_norm_stderr,none": 0.0004955831663326678,
						"acc_stderr,none": 0.044495448642276544,
						"alias": "kobest",
						"f1,none": 0.40373553734304096,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.015663503610155283,
						"alias": " - kobest_copa",
						"f1,none": 0.5688341274807078,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.37,
						"acc_norm,none": 0.448,
						"acc_norm_stderr,none": 0.022261697292270143,
						"acc_stderr,none": 0.02161328916516578,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3670506908642933,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5214105793450882,
						"acc_stderr,none": 0.02510289869636305,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5200676982591876,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6280807296720357,
						"acc_stderr,none": 0.01590021763812435,
						"alias": "lambada",
						"perplexity,none": 5.4995175027345065,
						"perplexity_stderr,none": 0.4154073293845765
					},
					"lambada_cloze": {
						"acc,none": 0.007665437609159713,
						"acc_stderr,none": 0.0012910586004762218,
						"alias": "lambada_cloze",
						"perplexity,none": 910.1699637679841,
						"perplexity_stderr,none": 34.49916941348411
					},
					"lambada_multilingual": {
						"acc,none": 0.43361148845332814,
						"acc_stderr,none": 0.06532621311621811,
						"alias": "lambada_multilingual",
						"perplexity,none": 48.10918047618764,
						"perplexity_stderr,none": 14.310185712534913
					},
					"lambada_openai": {
						"acc,none": 0.6568988938482437,
						"acc_stderr,none": 0.006614124982461026,
						"alias": " - lambada_openai",
						"perplexity,none": 4.711549267902551,
						"perplexity_stderr,none": 0.10507146167698257
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.0067921599068503785,
						"acc_stderr,none": 0.0011442899754321983,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 940.8414493286095,
						"perplexity_stderr,none": 33.43334335038908
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3421307975936348,
						"acc_stderr,none": 0.006609641974316326,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 67.73007947426339,
						"perplexity_stderr,none": 4.020695424655283
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6567048321366195,
						"acc_stderr,none": 0.00661501790443367,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.710104607100214,
						"perplexity_stderr,none": 0.10500422579610087
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.356685425965457,
						"acc_stderr,none": 0.006673696468046189,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 62.295199978936665,
						"perplexity_stderr,none": 3.336995185550062
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.42577139530370656,
						"acc_stderr,none": 0.006888786490936467,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 40.728011477428275,
						"perplexity_stderr,none": 2.1981878515254576
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.38676499126722297,
						"acc_stderr,none": 0.006784988579985178,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 65.08250684320967,
						"perplexity_stderr,none": 3.8577206751366395
					},
					"lambada_standard": {
						"acc,none": 0.5992625654958277,
						"acc_stderr,none": 0.00682732542760388,
						"alias": " - lambada_standard",
						"perplexity,none": 6.287980916207457,
						"perplexity_stderr,none": 0.1524144996900687
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.008538715311469047,
						"acc_stderr,none": 0.0012818766004755566,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 879.4984782073585,
						"perplexity_stderr,none": 28.149007739922617
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2627226463104326,
						"exact_match_stderr,get-answer": 0.01110391451342142
					},
					"logiqa": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.2534562211981567,
						"acc_norm_stderr,none": 0.01706170543978573,
						"acc_stderr,none": 0.016399713788445076,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25254452926208654,
						"acc_norm,none": 0.2792620865139949,
						"acc_norm_stderr,none": 0.011318961450567874,
						"acc_stderr,none": 0.010961589961715616,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2522613065326633,
						"acc_norm,none": 0.2556113902847571,
						"acc_norm_stderr,none": 0.007985287397847436,
						"acc_stderr,none": 0.007950617098798796,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.48813810633340393,
						"acc_stderr,none": 0.005144447703638847,
						"alias": "mc_taco",
						"f1,none": 0.40808328230251073,
						"f1_stderr,none": 0.006889968589139777
					},
					"medmcqa": {
						"acc,none": 0.26464260100406406,
						"acc_norm,none": 0.26464260100406406,
						"acc_norm_stderr,none": 0.006821613307365156,
						"acc_stderr,none": 0.006821613307365156,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2545168892380204,
						"acc_norm,none": 0.2545168892380204,
						"acc_norm_stderr,none": 0.012213317633567465,
						"acc_stderr,none": 0.012213317633567465,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2511038313630537,
						"acc_stderr,none": 0.040348222828877914,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.03820169914517905,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19736842105263158,
						"acc_stderr,none": 0.03238981601699397,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2641509433962264,
						"acc_stderr,none": 0.027134291628741702,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.22916666666666666,
						"acc_stderr,none": 0.035146974678623884,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2023121387283237,
						"acc_stderr,none": 0.03063114553919882,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.043898699568087785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610334,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.04096985139843671,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03724563619774632,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.022644212615525218,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.03852273364924315,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2709677419354839,
						"acc_stderr,none": 0.02528441611490016,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.28078817733990147,
						"acc_stderr,none": 0.03161856335358611,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.24848484848484848,
						"acc_stderr,none": 0.03374402644139405,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.02962022787479049,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.029778663037752954,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24102564102564103,
						"acc_stderr,none": 0.021685546665333195,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.028226446749683515,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23109243697478993,
						"acc_stderr,none": 0.027381406927868973,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.035118075718047245,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24036697247706423,
						"acc_stderr,none": 0.01832060732096407,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1574074074074074,
						"acc_stderr,none": 0.024837173518242397,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.02910225438967409,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.028609516716994934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3632286995515695,
						"acc_stderr,none": 0.03227790442850499,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2366412213740458,
						"acc_stderr,none": 0.03727673575596918,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24420828905419767,
						"acc_stderr,none": 0.026694910508076663,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.256198347107438,
						"acc_stderr,none": 0.03984979653302871,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374984,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22085889570552147,
						"acc_stderr,none": 0.03259177392742177,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.042466243366976256,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2524271844660194,
						"acc_stderr,none": 0.04301250399690878,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2606837606837607,
						"acc_stderr,none": 0.028760348956523414,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.29118773946360155,
						"acc_stderr,none": 0.01624608706970139,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24566473988439305,
						"acc_stderr,none": 0.023176298203992005,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961447,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.238562091503268,
						"acc_stderr,none": 0.024404394928087873,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2687479884132604,
						"acc_stderr,none": 0.045108539914000226,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2604501607717042,
						"acc_stderr,none": 0.02492672322484555,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2654320987654321,
						"acc_stderr,none": 0.024569223600460842,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290396,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.23402868318122555,
						"acc_stderr,none": 0.01081358555265968,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.19117647058823528,
						"acc_stderr,none": 0.02388688192244036,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.01774089950917779,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.04554619617541054,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.15918367346938775,
						"acc_stderr,none": 0.023420972069166355,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03781927975280289,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23880597014925373,
						"acc_stderr,none": 0.030147775935409214,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2568981921979067,
						"acc_stderr,none": 0.04989991517141305,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.03664314777288085,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.25146198830409355,
						"acc_stderr,none": 0.033275044238468436,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.37554763117677026,
						"acc_stderr,none": 0.004888314567268809,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.367473555736371,
						"acc_stderr,none": 0.004862432004413269,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6617647058823529,
						"acc_stderr,none": 0.023451145303506664,
						"alias": "mrpc",
						"f1,none": 0.7958579881656804,
						"f1_stderr,none": 0.016990405880120924
					},
					"multimedqa": {
						"acc,none": 0.2878637331440738,
						"acc_norm,none": 0.2626469598051318,
						"acc_norm_stderr,none": 8.778614059200267e-05,
						"acc_stderr,none": 0.10037047258687279,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6756019581521338,
						"mrr_stderr,none": 0.010315464210852446,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43002257336343114,
						"r@2_stderr,none": 0.01664189661349174
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6324304007582416,
						"mrr_stderr,none": 0.010347029128143814,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4650112866817156,
						"r@2_stderr,none": 0.016766114263692605
					},
					"openbookqa": {
						"acc,none": 0.258,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.021487751089720522,
						"acc_stderr,none": 0.019586711785215837,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4985,
						"acc_stderr,none": 0.011183085696839198,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.011177408788874896,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4895,
						"acc_stderr,none": 0.011180669867648657,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5435,
						"acc_stderr,none": 0.011140733053371404,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5245,
						"acc_stderr,none": 0.011169702598013184,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5125,
						"acc_stderr,none": 0.011179640744835738,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5095,
						"acc_stderr,none": 0.011181117282805218,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5088571428571429,
						"acc_stderr,none": 0.01892792045987846,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7236126224156693,
						"acc_norm,none": 0.721436343852013,
						"acc_norm_stderr,none": 0.01045939723596517,
						"acc_stderr,none": 0.010434162388275624,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2487724167378309,
						"acc_norm,none": 0.26857386848847137,
						"acc_norm_stderr,none": 0.0032381000604978986,
						"acc_stderr,none": 0.00315834833520192,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.020950557312477455,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7199914268735376,
						"acc_norm,none": 0.5121286006206975,
						"acc_norm_stderr,none": 0.0038348366896563705,
						"acc_stderr,none": 0.1371852338534413,
						"alias": "pythia",
						"bits_per_byte,none": 0.7220552283260857,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6495302422700988,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.711549267902551,
						"perplexity_stderr,none": 0.10507146167698257,
						"word_perplexity,none": 14.531932778850274,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.30851063829787234,
						"acc_norm,none": 0.38652482269503546,
						"acc_norm_stderr,none": 0.045679080146263956,
						"acc_stderr,none": 0.041197927799664004,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.475,
						"acc_norm_stderr,none": 0.04577759534198058,
						"acc_stderr,none": 0.04321358157014425,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.33125,
						"acc_norm_stderr,none": 0.03732598513993524,
						"acc_stderr,none": 0.03374839851779222,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3380281690140845,
						"acc_norm,none": 0.38028169014084506,
						"acc_norm_stderr,none": 0.028857363751758295,
						"acc_stderr,none": 0.028119201465363827,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49093904448105435,
						"acc_stderr,none": 0.006764299567764275,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6180806331931734,
						"acc_stderr,none": 0.0024163615085664044,
						"alias": "qqp",
						"f1,none": 0.3018492562282407,
						"f1_stderr,none": 0.004024039399820445
					},
					"race": {
						"acc,none": 0.33014354066985646,
						"acc_stderr,none": 0.014554323633246914,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5812274368231047,
						"acc_stderr,none": 0.029696661081234824,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.876,
						"acc_norm,none": 0.83,
						"acc_norm_stderr,none": 0.01188449583454167,
						"acc_stderr,none": 0.010427498872343956,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5812274368231047,
						"acc_stderr,none": 0.029696661081234824,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5619266055045872,
						"acc_stderr,none": 0.016811410738961592,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5462861141657502,
						"acc_norm,none": 0.7413276017194842,
						"acc_norm_stderr,none": 0.003096070577225409,
						"acc_stderr,none": 0.0035199122625693352,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5339589364746598,
						"acc_stderr,none": 0.024148667232371378,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5001001602564102,
						"acc_stderr,none": 0.005004255326032081,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5934934630586804,
						"acc_stderr,none": 0.004945055625920964,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5095098039215686,
						"acc_stderr,none": 0.004950084837550716,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3106305943833439,
						"acc_stderr,none": 0.04757574964937745,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.004895960832313341,
						"bleu_acc_stderr,none": 5.970576470394393e-06,
						"bleu_diff,none": -0.002394734887517943,
						"bleu_diff_stderr,none": 1.8033929274939632e-06,
						"bleu_max,none": 0.0038278834618436517,
						"bleu_max_stderr,none": 1.3195521685956608e-06,
						"rouge1_acc,none": 0.15422276621787026,
						"rouge1_acc_stderr,none": 0.00015985061837987227,
						"rouge1_diff,none": -0.3522557019355229,
						"rouge1_diff_stderr,none": 0.05516215393746433,
						"rouge1_max,none": 2.3298513472007274,
						"rouge1_max_stderr,none": 0.040920608150566036,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.15422276621787026,
						"rougeL_acc_stderr,none": 0.00015985061837987227,
						"rougeL_diff,none": -0.3367269512259081,
						"rougeL_diff_stderr,none": 0.05504343514765663,
						"rougeL_max,none": 2.299166484737046,
						"rougeL_max_stderr,none": 0.040922732641792046
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.004895960832313341,
						"bleu_acc_stderr,none": 0.002443476308539617,
						"bleu_diff,none": -0.002394734887517943,
						"bleu_diff_stderr,none": 0.0013429046606121982,
						"bleu_max,none": 0.0038278834618436517,
						"bleu_max_stderr,none": 0.0011487176191717705,
						"rouge1_acc,none": 0.15422276621787026,
						"rouge1_acc_stderr,none": 0.01264320443478916,
						"rouge1_diff,none": -0.3522557019355229,
						"rouge1_diff_stderr,none": 0.2348662469097344,
						"rouge1_max,none": 2.3298513472007274,
						"rouge1_max_stderr,none": 0.2022884281182837,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.15422276621787026,
						"rougeL_acc_stderr,none": 0.01264320443478916,
						"rougeL_diff,none": -0.3367269512259081,
						"rougeL_diff_stderr,none": 0.234613373761294,
						"rougeL_max,none": 2.299166484737046,
						"rougeL_max_stderr,none": 0.20229367919386915
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2141982864137087,
						"acc_stderr,none": 0.014362148155690467,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3589834413715965,
						"acc_stderr,none": 0.013826828497578412,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.01624015748031496,
						"exact_match_stderr,none": 0.0028046889385479907
					},
					"wic": {
						"acc,none": 0.5015673981191222,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7220552283260857,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6495302422700988,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.531932778850274,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.574585635359116,
						"acc_stderr,none": 0.013895257666646382,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5576923076923077,
						"acc_stderr,none": 0.04893740777701,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7619047619047619,
						"acc_stderr,none": 0.025825054502221036,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5754545454545454,
						"acc_stderr,none": 0.04097748639979641,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.022242244375731017,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.021206117013673066,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.021637197985722396,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.022175109265613162,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.021966635293832915,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547198,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.40934404283801873,
						"acc_stderr,none": 0.04372654715273308,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512703,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810775,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811678,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3650602409638554,
						"acc_stderr,none": 0.009650194822749628,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5140562248995983,
						"acc_stderr,none": 0.010018111813088548,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4566265060240964,
						"acc_stderr,none": 0.009984293410840315,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.470281124497992,
						"acc_stderr,none": 0.010004353982613848,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.38714859437751004,
						"acc_stderr,none": 0.009763465328590648,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4465863453815261,
						"acc_stderr,none": 0.00996472245735877,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3642570281124498,
						"acc_stderr,none": 0.009645667910246838,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38714859437751004,
						"acc_stderr,none": 0.009763465328590652,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.009881215932115996,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.37269076305220883,
						"acc_stderr,none": 0.009691761259693465,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42048192771084336,
						"acc_stderr,none": 0.00989451955110578,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.344578313253012,
						"acc_stderr,none": 0.009525590900110653,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5812526322122616,
						"acc_stderr,none": 0.054082979378632996,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5367306419589676,
						"acc_stderr,none": 0.012832359240206969,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7193911317008603,
						"acc_stderr,none": 0.011562314078147744,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6446062210456651,
						"acc_stderr,none": 0.012317247930418374,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5228325612177366,
						"acc_stderr,none": 0.012853702384870849,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.012795688167385286,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6128391793514228,
						"acc_stderr,none": 0.012535177511067376,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5029781601588352,
						"acc_stderr,none": 0.012866897066011225,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6055592322964924,
						"acc_stderr,none": 0.012577106513936133,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5261416280608868,
						"acc_stderr,none": 0.012849526888044208,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5704831237590999,
						"acc_stderr,none": 0.012738639381354,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551173,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7230838390649584,
						"acc_stderr,none": 0.05993268020348456,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8159139784946237,
						"acc_stderr,none": 0.008039231425138254,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5783132530120482,
						"acc_stderr,none": 0.05453428485295111,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6006256517205423,
						"acc_stderr,none": 0.015823744684528594,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6615969581749049,
						"acc_stderr,none": 0.02923231657730264,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.02764654065504541,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6607142857142857,
						"acc_stderr,none": 0.021110846258645333,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/rwkv-4-world-3b"
	},
	"RWKV/rwkv-4-world-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5636978579481398,
						"acc_norm,none": 0.5448139797068771,
						"acc_norm_stderr,none": 0.03981516298484297,
						"acc_stderr,none": 0.05318960366849133,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3553125,
						"acc_stderr,none": 0.016096060042583325,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1851,
						"acc_stderr,none": 0.1863005966011617,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8381044776119403,
						"acc_stderr,none": 0.1437098958193394,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24665676077265974,
						"acc_norm,none": 0.24665676077265974,
						"acc_norm_stderr,none": 0.11933299304240107,
						"acc_stderr,none": 0.11933299304240107,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2587635986876187,
						"acc_norm,none": 0.2587635986876187,
						"acc_norm_stderr,none": 0.0393099515325355,
						"acc_stderr,none": 0.0393099515325355,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.2936419200954083,
						"likelihood_diff_stderr,none": 0.47620486364168096,
						"pct_stereotype,none": 0.5736434108527132,
						"pct_stereotype_stderr,none": 0.08125104249263165
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"glue": {
						"acc,none": 0.5636752269105417,
						"acc_stderr,none": 0.08286104931377031,
						"alias": "glue",
						"f1,none": 0.4166933228499848,
						"f1_stderr,none": 0.001562521811103853,
						"mcc,none": 0.01348864658799917,
						"mcc_stderr,none": 0.001017436146617974
					},
					"kmmlu": {
						"acc,none": 0.29659254981230143,
						"acc_norm,none": 0.29659254981230143,
						"acc_norm_stderr,none": 0.033980234399003946,
						"acc_stderr,none": 0.033980234399003946,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.516334137250603,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.0005010020040080159,
						"acc_stderr,none": 0.058979921873439374,
						"alias": "kobest",
						"f1,none": 0.41741847040164487,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6676693188433922,
						"acc_stderr,none": 0.01689472165812205,
						"alias": "lambada",
						"perplexity,none": 4.582669382377997,
						"perplexity_stderr,none": 0.3358079026952327
					},
					"lambada_cloze": {
						"acc,none": 0.03415486124587619,
						"acc_stderr,none": 0.0027851796484193573,
						"alias": "lambada_cloze",
						"perplexity,none": 262.2070425489701,
						"perplexity_stderr,none": 20.08698851462437
					},
					"lambada_multilingual": {
						"acc,none": 0.473665825732583,
						"acc_stderr,none": 0.06613832944832002,
						"alias": "lambada_multilingual",
						"perplexity,none": 33.10052741124072,
						"perplexity_stderr,none": 9.7081463992688
					},
					"mmlu": {
						"acc,none": 0.2584389688078621,
						"acc_stderr,none": 0.03769663611577144,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24760892667375134,
						"acc_stderr,none": 0.027374386633605065,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.27647248149340203,
						"acc_stderr,none": 0.03588623150924139,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.25804354891127723,
						"acc_stderr,none": 0.0377472982452215,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.25721535045987953,
						"acc_stderr,none": 0.04803643340995113,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2858765081618169,
						"acc_norm,none": 0.24859762943954988,
						"acc_norm_stderr,none": 9.71291553985608e-05,
						"acc_stderr,none": 0.10989543345188667,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5206428571428573,
						"acc_stderr,none": 0.020526266658686284,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7255285413084764,
						"acc_norm,none": 0.5504572903781023,
						"acc_norm_stderr,none": 0.004127389115696987,
						"acc_stderr,none": 0.1402185213485563,
						"alias": "pythia",
						"bits_per_byte,none": 0.6821754804612891,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6045574892397707,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.940129642053726,
						"perplexity_stderr,none": 0.08144039495949996,
						"word_perplexity,none": 12.535083566781024,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3102836879432624,
						"acc_norm,none": 0.3723404255319149,
						"acc_norm_stderr,none": 0.04444241943937857,
						"acc_stderr,none": 0.035024147368704364,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5005823433496389,
						"acc_stderr,none": 0.010906418362338145,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2840977451542544,
						"acc_stderr,none": 0.001030773388837267,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": 0.0008121943067088997,
						"bleu_diff_stderr,none": 0.0009190496069811475,
						"bleu_max,none": 0.014669252725972394,
						"bleu_max_stderr,none": 0.0021873048191628336,
						"rouge1_acc,none": 0.08567931456548347,
						"rouge1_acc_stderr,none": 0.009798107161456841,
						"rouge1_diff,none": 0.08075047582228334,
						"rouge1_diff_stderr,none": 0.22057176839090892,
						"rouge1_max,none": 2.470406639678096,
						"rouge1_max_stderr,none": 0.23793324013893466,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.0832313341493268,
						"rougeL_acc_stderr,none": 0.009670039081592304,
						"rougeL_diff,none": 0.07925279528550083,
						"rougeL_diff_stderr,none": 0.22051394405368874,
						"rougeL_max,none": 2.4619884299823385,
						"rougeL_max_stderr,none": 0.23793568204894983
					},
					"xcopa": {
						"acc,none": 0.6014545454545455,
						"acc_stderr,none": 0.053116283731232235,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.41204819277108434,
						"acc_stderr,none": 0.040764307073034814,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.608627639732868,
						"acc_stderr,none": 0.058880111354957666,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7651157563497415,
						"acc_stderr,none": 0.050424154305431894,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5636978579481398,
						"acc_norm,none": 0.5448139797068771,
						"acc_norm_stderr,none": 0.03981516298484297,
						"acc_stderr,none": 0.05318960366849133,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3553125,
						"acc_stderr,none": 0.016096060042583325,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224489,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798597,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36666666666666664,
						"acc_stderr,none": 0.013916893275819938,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3395904436860068,
						"acc_norm,none": 0.38054607508532423,
						"acc_norm_stderr,none": 0.014188277712349824,
						"acc_stderr,none": 0.013839039762820164,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6742424242424242,
						"acc_norm,none": 0.6258417508417509,
						"acc_norm_stderr,none": 0.009929516948977625,
						"acc_stderr,none": 0.009616642976885968,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1851,
						"acc_stderr,none": 0.1863005966011617,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.1155,
						"acc_stderr,none": 0.0071488060341470035,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.011042665902539784,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.4095,
						"acc_stderr,none": 0.010998425236316457,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.5895,
						"acc_stderr,none": 0.011002518016406627,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.074,
						"acc_stderr,none": 0.00585483898752009,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.068,
						"acc_stderr,none": 0.005630617366325326,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.009,
						"acc_stderr,none": 0.002112280962711326,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0045,
						"acc_stderr,none": 0.0014969954902233325,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521454,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000085,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8381044776119403,
						"acc_stderr,none": 0.1437098958193394,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329822,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469343,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707365,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478321,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085344,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.627,
						"acc_stderr,none": 0.015300493622922814,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440481,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.01110798754893915,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448847,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319422,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666673,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426102,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.0068954729748978965,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.00868051561552371,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796384,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565743,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724437,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881416,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816318,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644608,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098689,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403626,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333334,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719123,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.01444273494157502,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271306,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340983,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474933,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445514,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369678,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493935,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.01474640486547349,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.01074366913239734,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785133,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752506,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696832,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844004,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727046,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000002,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747386,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.013334797216936426,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.015704987954361795,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.0056518088204523705,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487912,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503004,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175318,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.015792669451628896,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042095,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.014987482264363937,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037183,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695801,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475315,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792949,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333366,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611455,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452372,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.0157125072118642,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.327,
						"acc_stderr,none": 0.014842213153411242,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6311926605504588,
						"acc_stderr,none": 0.008438656079759072,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.0673769750864465,
						"alias": "cb",
						"f1,none": 0.30756302521008405,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24665676077265974,
						"acc_norm,none": 0.24665676077265974,
						"acc_norm_stderr,none": 0.11933299304240107,
						"acc_stderr,none": 0.11933299304240107,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.05620374845754972,
						"acc_stderr,none": 0.05620374845754972,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.0795654132101608,
						"acc_stderr,none": 0.0795654132101608,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359516,
						"acc_stderr,none": 0.07824607964359516,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.0967741935483871,
						"acc_norm,none": 0.0967741935483871,
						"acc_norm_stderr,none": 0.053978066228004884,
						"acc_stderr,none": 0.053978066228004884,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.1956521739130435,
						"acc_norm,none": 0.1956521739130435,
						"acc_norm_stderr,none": 0.05913682829884974,
						"acc_stderr,none": 0.05913682829884974,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996391,
						"acc_stderr,none": 0.08081046758996391,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2587635986876187,
						"acc_norm,none": 0.2587635986876187,
						"acc_norm_stderr,none": 0.0393099515325355,
						"acc_stderr,none": 0.0393099515325355,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.034339196275485345,
						"acc_stderr,none": 0.034339196275485345,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22297297297297297,
						"acc_norm,none": 0.22297297297297297,
						"acc_norm_stderr,none": 0.034330925181040015,
						"acc_stderr,none": 0.034330925181040015,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.035465630196243374,
						"acc_stderr,none": 0.035465630196243374,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2583732057416268,
						"acc_norm,none": 0.2583732057416268,
						"acc_norm_stderr,none": 0.030351822614803427,
						"acc_stderr,none": 0.030351822614803427,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2366412213740458,
						"acc_norm,none": 0.2366412213740458,
						"acc_norm_stderr,none": 0.03727673575596916,
						"acc_stderr,none": 0.03727673575596916,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.03762607496624008,
						"acc_stderr,none": 0.03762607496624008,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.042188119282053044,
						"acc_stderr,none": 0.042188119282053044,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2755417956656347,
						"acc_norm,none": 0.2755417956656347,
						"acc_norm_stderr,none": 0.02489845928700081,
						"acc_stderr,none": 0.02489845928700081,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.27932960893854747,
						"acc_norm,none": 0.27932960893854747,
						"acc_norm_stderr,none": 0.033629222387143616,
						"acc_stderr,none": 0.033629222387143616,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.02765215314415928,
						"acc_stderr,none": 0.02765215314415928,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999062,
						"acc_stderr,none": 0.04350546818999062,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800373,
						"acc_stderr,none": 0.04142972007800373,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.1574074074074074,
						"acc_norm,none": 0.1574074074074074,
						"acc_norm_stderr,none": 0.03520703990517964,
						"acc_stderr,none": 0.03520703990517964,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.04513676718168308,
						"acc_stderr,none": 0.04513676718168308,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.026223115500506114,
						"acc_stderr,none": 0.026223115500506114,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24019607843137256,
						"acc_norm,none": 0.24019607843137256,
						"acc_norm_stderr,none": 0.02998373305591362,
						"acc_stderr,none": 0.02998373305591362,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.03401052620104089,
						"acc_stderr,none": 0.03401052620104089,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.037940071215336206,
						"acc_stderr,none": 0.037940071215336206,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.03583711288976435,
						"acc_stderr,none": 0.03583711288976435,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.02775179241879092,
						"acc_stderr,none": 0.02775179241879092,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.030746300742124498,
						"acc_stderr,none": 0.030746300742124498,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.028657491285071966,
						"acc_stderr,none": 0.028657491285071966,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24782608695652175,
						"acc_norm,none": 0.24782608695652175,
						"acc_norm_stderr,none": 0.02853086259541008,
						"acc_stderr,none": 0.02853086259541008,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740234,
						"acc_stderr,none": 0.037498507091740234,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.03297692925434461,
						"acc_stderr,none": 0.03297692925434461,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.035829121651111746,
						"acc_stderr,none": 0.035829121651111746,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101963,
						"acc_stderr,none": 0.03334150198101963,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410717,
						"acc_stderr,none": 0.03661433360410717,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2865853658536585,
						"acc_norm,none": 0.2865853658536585,
						"acc_norm_stderr,none": 0.03541638332993505,
						"acc_stderr,none": 0.03541638332993505,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.22162162162162163,
						"acc_norm,none": 0.22162162162162163,
						"acc_norm_stderr,none": 0.030619107991457357,
						"acc_stderr,none": 0.030619107991457357,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.22093023255813954,
						"acc_norm,none": 0.22093023255813954,
						"acc_norm_stderr,none": 0.03172617353438933,
						"acc_stderr,none": 0.03172617353438933,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745197,
						"acc_stderr,none": 0.021801329069745197,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2570093457943925,
						"acc_norm,none": 0.2570093457943925,
						"acc_norm_stderr,none": 0.02994169153324464,
						"acc_stderr,none": 0.02994169153324464,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901211,
						"acc_stderr,none": 0.04146178164901211,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03041268445992876,
						"acc_stderr,none": 0.03041268445992876,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.033477857599386346,
						"acc_stderr,none": 0.033477857599386346,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.26455026455026454,
						"acc_norm,none": 0.26455026455026454,
						"acc_norm_stderr,none": 0.03217004537697526,
						"acc_stderr,none": 0.03217004537697526,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.0420716075558402,
						"acc_stderr,none": 0.0420716075558402,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2206896551724138,
						"acc_norm,none": 0.2206896551724138,
						"acc_norm_stderr,none": 0.03455930201924812,
						"acc_stderr,none": 0.03455930201924812,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.043362909039199406,
						"acc_stderr,none": 0.043362909039199406,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24571428571428572,
						"acc_norm,none": 0.24571428571428572,
						"acc_norm_stderr,none": 0.032636871426278406,
						"acc_stderr,none": 0.032636871426278406,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2890995260663507,
						"acc_norm,none": 0.2890995260663507,
						"acc_norm_stderr,none": 0.03128372390561387,
						"acc_stderr,none": 0.03128372390561387,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426434,
						"acc_stderr,none": 0.022743327388426434,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.27011494252873564,
						"acc_norm,none": 0.27011494252873564,
						"acc_norm_stderr,none": 0.03375813841943684,
						"acc_stderr,none": 0.03375813841943684,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.03057618529758098,
						"acc_stderr,none": 0.03057618529758098,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23030303030303031,
						"acc_norm,none": 0.23030303030303031,
						"acc_norm_stderr,none": 0.03287666758603489,
						"acc_stderr,none": 0.03287666758603489,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.032530209055933366,
						"acc_stderr,none": 0.032530209055933366,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2236024844720497,
						"acc_norm,none": 0.2236024844720497,
						"acc_norm_stderr,none": 0.03293975688757214,
						"acc_stderr,none": 0.03293975688757214,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.01348864658799917,
						"mcc_stderr,none": 0.031897274908963213
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.0358870281282637,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.2936419200954083,
						"likelihood_diff_stderr,none": 0.47620486364168096,
						"pct_stereotype,none": 0.5736434108527132,
						"pct_stereotype_stderr,none": 0.08125104249263165
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4326923076923075,
						"likelihood_diff_stderr,none": 0.08426362543429865,
						"pct_stereotype,none": 0.6118067978533095,
						"pct_stereotype_stderr,none": 0.011904032527924666
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.771978021978022,
						"likelihood_diff_stderr,none": 0.39680497388039415,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.363636363636363,
						"likelihood_diff_stderr,none": 2.0094559520279356,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.042307692307692,
						"likelihood_diff_stderr,none": 0.5937770548180541,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.414453125,
						"likelihood_diff_stderr,none": 0.16268824336973334,
						"pct_stereotype,none": 0.61875,
						"pct_stereotype_stderr,none": 0.02719363040277547
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2690972222222223,
						"likelihood_diff_stderr,none": 0.22779566871936482,
						"pct_stereotype,none": 0.5231481481481481,
						"pct_stereotype_stderr,none": 0.03406315360711507
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.529513888888889,
						"likelihood_diff_stderr,none": 0.31619000076805476,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.052306187285139825
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.1985728346456694,
						"likelihood_diff_stderr,none": 0.1447880691532075,
						"pct_stereotype,none": 0.49015748031496065,
						"pct_stereotype_stderr,none": 0.02220147678894261
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7015765765765765,
						"likelihood_diff_stderr,none": 0.3412620623157685,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.041887708614323976
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.474462365591398,
						"likelihood_diff_stderr,none": 0.3894085917424617,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.093421052631579,
						"likelihood_diff_stderr,none": 0.23405526081677008,
						"pct_stereotype,none": 0.7157894736842105,
						"pct_stereotype_stderr,none": 0.03280815673574656
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.153026237328563,
						"likelihood_diff_stderr,none": 0.07354217858241796,
						"pct_stereotype,none": 0.5348837209302325,
						"pct_stereotype_stderr,none": 0.012183538867674261
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 2.7444444444444445,
						"likelihood_diff_stderr,none": 0.2775117327708793,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.2403846153846154,
						"likelihood_diff_stderr,none": 0.5235751303869994,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.9772727272727275,
						"likelihood_diff_stderr,none": 0.44504940903012946,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.05524032911365453
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.637461059190031,
						"likelihood_diff_stderr,none": 0.1424049101957193,
						"pct_stereotype,none": 0.5482866043613707,
						"pct_stereotype_stderr,none": 0.02782020420481579
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4792490118577075,
						"likelihood_diff_stderr,none": 0.18572841598062043,
						"pct_stereotype,none": 0.3952569169960474,
						"pct_stereotype_stderr,none": 0.030798170848773867
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4375,
						"likelihood_diff_stderr,none": 0.4625750689828025,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.9551630434782608,
						"likelihood_diff_stderr,none": 0.1381246240821433,
						"pct_stereotype,none": 0.40869565217391307,
						"pct_stereotype_stderr,none": 0.022945588573986354
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.1902173913043477,
						"likelihood_diff_stderr,none": 0.26919691843968635,
						"pct_stereotype,none": 0.7043478260869566,
						"pct_stereotype_stderr,none": 0.04273972288221525
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.4835164835164836,
						"likelihood_diff_stderr,none": 0.32392402422594335,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.417091836734694,
						"likelihood_diff_stderr,none": 0.2351747762033498,
						"pct_stereotype,none": 0.7244897959183674,
						"pct_stereotype_stderr,none": 0.03199393624667903
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"glue": {
						"acc,none": 0.5636752269105417,
						"acc_stderr,none": 0.08286104931377031,
						"alias": "glue",
						"f1,none": 0.4166933228499848,
						"f1_stderr,none": 0.001562521811103853,
						"mcc,none": 0.01348864658799917,
						"mcc_stderr,none": 0.001017436146617974
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"hellaswag": {
						"acc,none": 0.49063931487751444,
						"acc_norm,none": 0.6525592511451902,
						"acc_norm_stderr,none": 0.004751840646730852,
						"acc_stderr,none": 0.00498890690130774,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.29659254981230143,
						"acc_norm,none": 0.29659254981230143,
						"acc_norm_stderr,none": 0.033980234399003946,
						"acc_stderr,none": 0.033980234399003946,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.03684529491774709,
						"acc_stderr,none": 0.03684529491774709,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.306,
						"acc_norm,none": 0.306,
						"acc_norm_stderr,none": 0.014580006055436965,
						"acc_stderr,none": 0.014580006055436965,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220478,
						"acc_stderr,none": 0.014484778521220478,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809949,
						"acc_stderr,none": 0.013963164754809949,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.01422115470843492,
						"acc_stderr,none": 0.01422115470843492,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.01736649795856463,
						"acc_stderr,none": 0.01736649795856463,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.01485384248727033,
						"acc_stderr,none": 0.01485384248727033,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.334,
						"acc_norm,none": 0.334,
						"acc_norm_stderr,none": 0.014922019523732958,
						"acc_stderr,none": 0.014922019523732958,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.343,
						"acc_norm,none": 0.343,
						"acc_norm_stderr,none": 0.015019206922356951,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.34,
						"acc_norm,none": 0.34,
						"acc_norm_stderr,none": 0.014987482264363937,
						"acc_stderr,none": 0.014987482264363937,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.25384615384615383,
						"acc_norm,none": 0.25384615384615383,
						"acc_norm_stderr,none": 0.03831815850874499,
						"acc_stderr,none": 0.03831815850874499,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.316,
						"acc_norm,none": 0.316,
						"acc_norm_stderr,none": 0.014709193056057127,
						"acc_stderr,none": 0.014709193056057127,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.342,
						"acc_norm,none": 0.342,
						"acc_norm_stderr,none": 0.015008706182121731,
						"acc_stderr,none": 0.015008706182121731,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796275,
						"acc_stderr,none": 0.013996674851796275,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996659,
						"acc_stderr,none": 0.014782913600996659,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377944,
						"acc_stderr,none": 0.014370995982377944,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632896,
						"acc_stderr,none": 0.014632638658632896,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950451,
						"acc_stderr,none": 0.014553205687950451,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.332,
						"acc_norm,none": 0.332,
						"acc_norm_stderr,none": 0.014899597242811475,
						"acc_stderr,none": 0.014899597242811475,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.335,
						"acc_norm,none": 0.335,
						"acc_norm_stderr,none": 0.014933117490932572,
						"acc_stderr,none": 0.014933117490932572,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.01437099598237794,
						"acc_stderr,none": 0.01437099598237794,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.01343445140243869,
						"acc_stderr,none": 0.01343445140243869,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543152,
						"acc_stderr,none": 0.014512395033543152,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809942,
						"acc_stderr,none": 0.013963164754809942,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.31333333333333335,
						"acc_norm,none": 0.31333333333333335,
						"acc_norm_stderr,none": 0.0189523414032947,
						"acc_stderr,none": 0.0189523414032947,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145156,
						"acc_stderr,none": 0.013979965645145156,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.306,
						"acc_norm,none": 0.306,
						"acc_norm_stderr,none": 0.014580006055436967,
						"acc_stderr,none": 0.014580006055436967,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.01448477852122048,
						"acc_stderr,none": 0.01448477852122048,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.0146966319607925,
						"acc_stderr,none": 0.0146966319607925,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.038612291966536934,
						"acc_stderr,none": 0.038612291966536934,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.023956482285140766,
						"acc_stderr,none": 0.023956482285140766,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.01366318713487765,
						"acc_stderr,none": 0.01366318713487765,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795023,
						"acc_stderr,none": 0.014944140233795023,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.01460648312734276,
						"acc_stderr,none": 0.01460648312734276,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796282,
						"acc_stderr,none": 0.013996674851796282,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259717,
						"acc_stderr,none": 0.013929286594259717,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.029832025555495235,
						"acc_stderr,none": 0.029832025555495235,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934644,
						"acc_stderr,none": 0.014770821817934644,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.516334137250603,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.0005010020040080159,
						"acc_stderr,none": 0.058979921873439374,
						"alias": "kobest",
						"f1,none": 0.41741847040164487,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5028490028490028,
						"acc_stderr,none": 0.013348550797680823,
						"alias": " - kobest_boolq",
						"f1,none": 0.3371320037986705,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936531,
						"alias": " - kobest_copa",
						"f1,none": 0.586161978005461,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.378,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.022383074051792257,
						"acc_stderr,none": 0.02170655082451818,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3740942147315669,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.6498740554156172,
						"acc_stderr,none": 0.023970613717700776,
						"alias": " - kobest_sentineg",
						"f1,none": 0.6146680725373406,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6676693188433922,
						"acc_stderr,none": 0.01689472165812205,
						"alias": "lambada",
						"perplexity,none": 4.582669382377997,
						"perplexity_stderr,none": 0.3358079026952327
					},
					"lambada_cloze": {
						"acc,none": 0.03415486124587619,
						"acc_stderr,none": 0.0027851796484193573,
						"alias": "lambada_cloze",
						"perplexity,none": 262.2070425489701,
						"perplexity_stderr,none": 20.08698851462437
					},
					"lambada_multilingual": {
						"acc,none": 0.473665825732583,
						"acc_stderr,none": 0.06613832944832002,
						"alias": "lambada_multilingual",
						"perplexity,none": 33.10052741124072,
						"perplexity_stderr,none": 9.7081463992688
					},
					"lambada_openai": {
						"acc,none": 0.6984281001358432,
						"acc_stderr,none": 0.00639393711933144,
						"alias": " - lambada_openai",
						"perplexity,none": 3.940129642053726,
						"perplexity_stderr,none": 0.08144039495949996
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03182612070638463,
						"acc_stderr,none": 0.0024455728613517317,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 225.18320098383734,
						"perplexity_stderr,none": 6.703890117190916
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3801668930719969,
						"acc_stderr,none": 0.006762956659647623,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 49.14912991044829,
						"perplexity_stderr,none": 2.838657872147601
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6986221618474675,
						"acc_stderr,none": 0.0063927674829785145,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.942126598314275,
						"perplexity_stderr,none": 0.08157901258498655
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.39355715117407336,
						"acc_stderr,none": 0.006806297320641507,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 42.73709405729042,
						"perplexity_stderr,none": 2.1907946376198324
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.47137589753541626,
						"acc_stderr,none": 0.00695455329137301,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 27.73453501602614,
						"perplexity_stderr,none": 1.4370930595989637
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4246070250339608,
						"acc_stderr,none": 0.006886331702011291,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 41.93975147412448,
						"perplexity_stderr,none": 2.365656198842874
					},
					"lambada_standard": {
						"acc,none": 0.6365224141276926,
						"acc_stderr,none": 0.006701279636433246,
						"alias": " - lambada_standard",
						"perplexity,none": 5.222890299411447,
						"perplexity_stderr,none": 0.1179746098096352
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03648360178536775,
						"acc_stderr,none": 0.00261210410334047,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 299.2308841141028,
						"perplexity_stderr,none": 8.751853212002933
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2538167938931298,
						"exact_match_stderr,get-answer": 0.01097980986708506
					},
					"logiqa": {
						"acc,none": 0.21351766513056836,
						"acc_norm,none": 0.2411674347158218,
						"acc_norm_stderr,none": 0.016779369344911064,
						"acc_stderr,none": 0.016073287529685204,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24872773536895673,
						"acc_norm,none": 0.24936386768447838,
						"acc_norm_stderr,none": 0.010915494193142777,
						"acc_stderr,none": 0.010906180806103546,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24824120603015076,
						"acc_norm,none": 0.2525963149078727,
						"acc_norm_stderr,none": 0.007954112207299583,
						"acc_stderr,none": 0.0079081843625755,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5594153780978606,
						"acc_stderr,none": 0.0051094348849251145,
						"alias": "mc_taco",
						"f1,none": 0.4110985277463194,
						"f1_stderr,none": 0.007378737079530324
					},
					"medmcqa": {
						"acc,none": 0.2596222806598135,
						"acc_norm,none": 0.2596222806598135,
						"acc_norm_stderr,none": 0.006779624437908079,
						"acc_stderr,none": 0.006779624437908079,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.23252160251374707,
						"acc_norm,none": 0.23252160251374707,
						"acc_norm_stderr,none": 0.011844621250896447,
						"acc_stderr,none": 0.011844621250896447,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2584389688078621,
						"acc_stderr,none": 0.03769663611577144,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036847,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.03633384414073463,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.20394736842105263,
						"acc_stderr,none": 0.03279000406310049,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2792452830188679,
						"acc_stderr,none": 0.027611163402399715,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.18055555555555555,
						"acc_stderr,none": 0.032166008088022675,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.03456425745086999,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376536,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847415,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.28936170212765955,
						"acc_stderr,none": 0.02964400657700962,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748141,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135302,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2751322751322751,
						"acc_stderr,none": 0.023000086859068652,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2838709677419355,
						"acc_stderr,none": 0.025649381063029265,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.03194740072265541,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.20606060606060606,
						"acc_stderr,none": 0.031584153240477086,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.20707070707070707,
						"acc_stderr,none": 0.02886977846026703,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.2538860103626943,
						"acc_stderr,none": 0.03141024780565319,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2846153846153846,
						"acc_stderr,none": 0.022878322799706283,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.02730914058823016,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29831932773109243,
						"acc_stderr,none": 0.02971914287634285,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.036030385453603826,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24220183486238533,
						"acc_stderr,none": 0.018368176306598615,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.026491914727355154,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.23039215686274508,
						"acc_stderr,none": 0.029554292605695087,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.028609516716994934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.32286995515695066,
						"acc_stderr,none": 0.031381476375754995,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2595419847328244,
						"acc_stderr,none": 0.03844876139785271,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24760892667375134,
						"acc_stderr,none": 0.027374386633605065,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.24793388429752067,
						"acc_stderr,none": 0.03941897526516304,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052192,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.24539877300613497,
						"acc_stderr,none": 0.03380939813943354,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285713,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2815533980582524,
						"acc_stderr,none": 0.04453254836326468,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2564102564102564,
						"acc_stderr,none": 0.02860595370200425,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2656449553001277,
						"acc_stderr,none": 0.01579430248788872,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.023083658586984204,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24804469273743016,
						"acc_stderr,none": 0.014444157808261448,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.025553169991826507,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.27647248149340203,
						"acc_stderr,none": 0.03588623150924139,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.26688102893890675,
						"acc_stderr,none": 0.025122637608816646,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.02409347123262133,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.025257861359432428,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2392438070404172,
						"acc_stderr,none": 0.010896123652676662,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.29044117647058826,
						"acc_stderr,none": 0.02757646862274053,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.01774089950917779,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.35454545454545455,
						"acc_stderr,none": 0.04582004841505416,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2530612244897959,
						"acc_stderr,none": 0.027833023871399663,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.25804354891127723,
						"acc_stderr,none": 0.0377472982452215,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22885572139303484,
						"acc_stderr,none": 0.02970528405677245,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.25721535045987953,
						"acc_stderr,none": 0.04803643340995113,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.0362933532994786,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.26900584795321636,
						"acc_stderr,none": 0.03401052620104091,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.42241467142129396,
						"acc_stderr,none": 0.00498602608933982,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4306346623270952,
						"acc_stderr,none": 0.004994030104323812,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7058823529411765,
						"acc_stderr,none": 0.022585489065607776,
						"alias": "mrpc",
						"f1,none": 0.8203592814371258,
						"f1_stderr,none": 0.016116395762022426
					},
					"multimedqa": {
						"acc,none": 0.2858765081618169,
						"acc_norm,none": 0.24859762943954988,
						"acc_norm_stderr,none": 9.71291553985608e-05,
						"acc_stderr,none": 0.10989543345188667,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5713696369636964,
						"acc_stderr,none": 0.007108263771672479,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6863243057146567,
						"mrr_stderr,none": 0.010401342807360337,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.40970654627539504,
						"r@2_stderr,none": 0.01653098758467983
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.634687737034083,
						"mrr_stderr,none": 0.010334640488200536,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47742663656884876,
						"r@2_stderr,none": 0.016790178837117337
					},
					"openbookqa": {
						"acc,none": 0.274,
						"acc_norm,none": 0.394,
						"acc_norm_stderr,none": 0.021874299301689253,
						"acc_stderr,none": 0.01996610354027947,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.01117991481396971,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.01117776123260332,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4945,
						"acc_stderr,none": 0.011182459420867635,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5365,
						"acc_stderr,none": 0.011153298751334336,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5535,
						"acc_stderr,none": 0.01111893386729012,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.011129305041886322,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5145,
						"acc_stderr,none": 0.011178432523249468,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5206428571428573,
						"acc_stderr,none": 0.020526266658686284,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.750272034820457,
						"acc_norm,none": 0.7540805223068553,
						"acc_norm_stderr,none": 0.010047331865625193,
						"acc_stderr,none": 0.010099232969867497,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2432216054654142,
						"acc_norm,none": 0.27300384286934243,
						"acc_norm_stderr,none": 0.0032547946169136665,
						"acc_stderr,none": 0.003134430099234369,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530075,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7255285413084764,
						"acc_norm,none": 0.5504572903781023,
						"acc_norm_stderr,none": 0.004127389115696987,
						"acc_stderr,none": 0.1402185213485563,
						"alias": "pythia",
						"bits_per_byte,none": 0.6821754804612891,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6045574892397707,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.940129642053726,
						"perplexity_stderr,none": 0.08144039495949996,
						"word_perplexity,none": 12.535083566781024,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3102836879432624,
						"acc_norm,none": 0.3723404255319149,
						"acc_norm_stderr,none": 0.04444241943937857,
						"acc_stderr,none": 0.035024147368704364,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.4666666666666667,
						"acc_norm_stderr,none": 0.0457329560380023,
						"acc_stderr,none": 0.04321358157014425,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.037826149818120415,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.31338028169014087,
						"acc_norm,none": 0.34507042253521125,
						"acc_norm_stderr,none": 0.028259075656935143,
						"acc_stderr,none": 0.027574062217983558,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5586673988650924,
						"acc_stderr,none": 0.006718677905071418,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6201830324016819,
						"acc_stderr,none": 0.002413796323624817,
						"alias": "qqp",
						"f1,none": 0.41268262831790714,
						"f1_stderr,none": 0.0038231338071202216
					},
					"race": {
						"acc,none": 0.3416267942583732,
						"acc_stderr,none": 0.014677827770761076,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.029621832222417196,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.908,
						"acc_norm,none": 0.863,
						"acc_norm_stderr,none": 0.010878848714333315,
						"acc_stderr,none": 0.00914437639315111,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.029621832222417196,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9369266055045872,
						"acc_stderr,none": 0.008236957223179246,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5630810756772968,
						"acc_norm,none": 0.7619714085774267,
						"acc_norm_stderr,none": 0.00301102681551992,
						"acc_stderr,none": 0.003506845363494957,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5005823433496389,
						"acc_stderr,none": 0.010906418362338145,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.005004255426437999,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.47714604236343366,
						"acc_stderr,none": 0.005028579346022087,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5238235294117647,
						"acc_stderr,none": 0.004945357260283834,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2840977451542544,
						"acc_stderr,none": 0.001030773388837267,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": 0.0008121943067088997,
						"bleu_diff_stderr,none": 0.0009190496069811475,
						"bleu_max,none": 0.014669252725972394,
						"bleu_max_stderr,none": 0.0021873048191628336,
						"rouge1_acc,none": 0.08567931456548347,
						"rouge1_acc_stderr,none": 0.009798107161456841,
						"rouge1_diff,none": 0.08075047582228334,
						"rouge1_diff_stderr,none": 0.22057176839090892,
						"rouge1_max,none": 2.470406639678096,
						"rouge1_max_stderr,none": 0.23793324013893466,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.0832313341493268,
						"rougeL_acc_stderr,none": 0.009670039081592304,
						"rougeL_diff,none": 0.07925279528550083,
						"rougeL_diff_stderr,none": 0.22051394405368874,
						"rougeL_max,none": 2.4619884299823385,
						"rougeL_max_stderr,none": 0.23793568204894983
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": 0.0008121943067088997,
						"bleu_diff_stderr,none": 0.0009190496069811475,
						"bleu_max,none": 0.014669252725972394,
						"bleu_max_stderr,none": 0.0021873048191628336,
						"rouge1_acc,none": 0.08567931456548347,
						"rouge1_acc_stderr,none": 0.009798107161456841,
						"rouge1_diff,none": 0.08075047582228334,
						"rouge1_diff_stderr,none": 0.22057176839090892,
						"rouge1_max,none": 2.470406639678096,
						"rouge1_max_stderr,none": 0.23793324013893466,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.0832313341493268,
						"rougeL_acc_stderr,none": 0.009670039081592304,
						"rougeL_diff,none": 0.07925279528550083,
						"rougeL_diff_stderr,none": 0.22051394405368874,
						"rougeL_max,none": 2.4619884299823385,
						"rougeL_max_stderr,none": 0.23793568204894983
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22643818849449204,
						"acc_stderr,none": 0.014651337324602587,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.34173739164868633,
						"acc_stderr,none": 0.013556352902918343,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.022637795275590553,
						"exact_match_stderr,none": 0.0033005770276179373
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6821754804612891,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6045574892397707,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.535083566781024,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6243093922651933,
						"acc_stderr,none": 0.013611257508380444,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737999,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.3557692307692308,
						"acc_stderr,none": 0.04717221961050337,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8461538461538461,
						"acc_stderr,none": 0.02187678688440468,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6014545454545455,
						"acc_stderr,none": 0.053116283731232235,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.02138004238594604,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.022357273881016403,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143025,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.02185468495561126,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.021144791425048853,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.02117566569520941,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.41204819277108434,
						"acc_stderr,none": 0.040764307073034814,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956478,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994481,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583408,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998446,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5084337349397591,
						"acc_stderr,none": 0.010020647068114183,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4530120481927711,
						"acc_stderr,none": 0.00997771990435373,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.45220883534136547,
						"acc_stderr,none": 0.00997618708680372,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.009779967579941793,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4461847389558233,
						"acc_stderr,none": 0.009963854274139157,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3863453815261044,
						"acc_stderr,none": 0.009759721337538349,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.009807915070677296,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894263,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3899598393574297,
						"acc_stderr,none": 0.00977634921819301,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41445783132530123,
						"acc_stderr,none": 0.009874311310483544,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177138,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.608627639732868,
						"acc_stderr,none": 0.058880111354957666,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5665122435473197,
						"acc_stderr,none": 0.012752771973917615,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7584381204500331,
						"acc_stderr,none": 0.011015033011775258,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6750496360026472,
						"acc_stderr,none": 0.012052798442200205,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5440105890138981,
						"acc_stderr,none": 0.012817182901076037,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5810721376571807,
						"acc_stderr,none": 0.012696855440486893,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6518861681005956,
						"acc_stderr,none": 0.012259084803727355,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5241561879549967,
						"acc_stderr,none": 0.012852100057309605,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.642620780939775,
						"acc_stderr,none": 0.01233256908197468,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828164,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5737921906022502,
						"acc_stderr,none": 0.012726223450627896,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6307081403044341,
						"acc_stderr,none": 0.012419685881273582,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7651157563497415,
						"acc_stderr,none": 0.050424154305431894,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8404301075268817,
						"acc_stderr,none": 0.00759640682705417,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.052212602620321284,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6631908237747653,
						"acc_stderr,none": 0.01526962801456709,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7224334600760456,
						"acc_stderr,none": 0.02766507401028683,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6317460317460317,
						"acc_stderr,none": 0.027219500732466703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7341269841269841,
						"acc_stderr,none": 0.01969875288983336,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.2594359777809429,
						"acc_stderr,none": 0.038721756918878456,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24867162592986186,
						"acc_stderr,none": 0.03395931821381665,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.26649501126488573,
						"acc_stderr,none": 0.035952550294869795,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26649333766655836,
						"acc_stderr,none": 0.03568643747433773,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26165556612749763,
						"acc_stderr,none": 0.04824334124808149,
						"alias": " - stem"
					},
					"truthfulqa": {
						"acc,none": 0.28423052705946994,
						"acc_stderr,none": 0.001034700142833498,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": -3.163384289607278e-05,
						"bleu_diff_stderr,none": 0.0008504233327829333,
						"bleu_max,none": 0.01577206435664631,
						"bleu_max_stderr,none": 0.002255163843493232,
						"rouge1_acc,none": 0.07466340269277846,
						"rouge1_acc_stderr,none": 0.009201501035844096,
						"rouge1_diff,none": -0.11073781985269729,
						"rouge1_diff_stderr,none": 0.2143049141742196,
						"rouge1_max,none": 2.2721266317542645,
						"rouge1_max_stderr,none": 0.22343075535560017,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.07588739290085679,
						"rougeL_acc_stderr,none": 0.009270479217707212,
						"rougeL_diff,none": -0.1040161327348345,
						"rougeL_diff_stderr,none": 0.21417941840020896,
						"rougeL_max,none": 2.2652744193409764,
						"rougeL_max_stderr,none": 0.22342083854607495
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.3378839590443686,
						"acc_norm,none": 0.386518771331058,
						"acc_norm_stderr,none": 0.014230084761910471,
						"acc_stderr,none": 0.01382204792228351,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"hellaswag": {
						"acc,none": 0.4838677554272057,
						"acc_norm,none": 0.6559450308703445,
						"acc_norm_stderr,none": 0.004740882120999972,
						"acc_stderr,none": 0.004987183560792756,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.2594359777809429,
						"acc_stderr,none": 0.038721756918878456,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.03749850709174021,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19078947368421054,
						"acc_stderr,none": 0.031975658210325,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.30943396226415093,
						"acc_stderr,none": 0.028450154794118627,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.24305555555555555,
						"acc_stderr,none": 0.03586879280080341,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2658959537572254,
						"acc_stderr,none": 0.0336876293225943,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.0379328118530781,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33191489361702126,
						"acc_stderr,none": 0.030783736757745657,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.02293097307163334,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.042407993275749234,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2870967741935484,
						"acc_stderr,none": 0.025736542745594518,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.23645320197044334,
						"acc_stderr,none": 0.029896114291733552,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.296969696969697,
						"acc_stderr,none": 0.03567969772268048,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3181818181818182,
						"acc_stderr,none": 0.03318477333845331,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.24870466321243523,
						"acc_stderr,none": 0.031195840877700293,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23076923076923078,
						"acc_stderr,none": 0.021362027725222724,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.028226446749683522,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.24789915966386555,
						"acc_stderr,none": 0.028047967224176892,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.1986754966887417,
						"acc_stderr,none": 0.032578473844367746,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26055045871559634,
						"acc_stderr,none": 0.018819182034850068,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2824074074074074,
						"acc_stderr,none": 0.030701372111510923,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.22362869198312235,
						"acc_stderr,none": 0.027123298205229972,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2645739910313901,
						"acc_stderr,none": 0.029605103217038325,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.25190839694656486,
						"acc_stderr,none": 0.038073871163060866,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24867162592986186,
						"acc_stderr,none": 0.03395931821381665,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.15702479338842976,
						"acc_stderr,none": 0.03321244842547129,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.03755265865037182,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2331288343558282,
						"acc_stderr,none": 0.033220157957767414,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467763,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.04498676320572922,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.02934311479809448,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.26053639846743293,
						"acc_stderr,none": 0.015696008563807106,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2514450867052023,
						"acc_stderr,none": 0.023357365785874037,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217887,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.026173908506718576,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.26649501126488573,
						"acc_stderr,none": 0.035952550294869795,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2797427652733119,
						"acc_stderr,none": 0.025494259350694902,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.02465968518596729,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24641460234680573,
						"acc_stderr,none": 0.011005971399927227,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.1875,
						"acc_stderr,none": 0.023709788253811766,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.26633986928104575,
						"acc_stderr,none": 0.0178831881346672,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.33636363636363636,
						"acc_stderr,none": 0.04525393596302505,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2897959183673469,
						"acc_stderr,none": 0.02904308868330434,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26649333766655836,
						"acc_stderr,none": 0.03568643747433773,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.25870646766169153,
						"acc_stderr,none": 0.030965903123573026,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26165556612749763,
						"acc_stderr,none": 0.04824334124808149,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2469879518072289,
						"acc_stderr,none": 0.03357351982064536,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.23976608187134502,
						"acc_stderr,none": 0.032744852119469564,
						"alias": "  - world_religions"
					},
					"truthfulqa": {
						"acc,none": 0.28423052705946994,
						"acc_stderr,none": 0.001034700142833498,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": -3.163384289607278e-05,
						"bleu_diff_stderr,none": 0.0008504233327829333,
						"bleu_max,none": 0.01577206435664631,
						"bleu_max_stderr,none": 0.002255163843493232,
						"rouge1_acc,none": 0.07466340269277846,
						"rouge1_acc_stderr,none": 0.009201501035844096,
						"rouge1_diff,none": -0.11073781985269729,
						"rouge1_diff_stderr,none": 0.2143049141742196,
						"rouge1_max,none": 2.2721266317542645,
						"rouge1_max_stderr,none": 0.22343075535560017,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.07588739290085679,
						"rougeL_acc_stderr,none": 0.009270479217707212,
						"rougeL_diff,none": -0.1040161327348345,
						"rougeL_diff_stderr,none": 0.21417941840020896,
						"rougeL_max,none": 2.2652744193409764,
						"rougeL_max_stderr,none": 0.22342083854607495
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.006119951040391677,
						"bleu_acc_stderr,none": 0.0027302089178066944,
						"bleu_diff,none": -3.163384289607278e-05,
						"bleu_diff_stderr,none": 0.0008504233327829333,
						"bleu_max,none": 0.01577206435664631,
						"bleu_max_stderr,none": 0.002255163843493232,
						"rouge1_acc,none": 0.07466340269277846,
						"rouge1_acc_stderr,none": 0.009201501035844096,
						"rouge1_diff,none": -0.11073781985269729,
						"rouge1_diff_stderr,none": 0.2143049141742196,
						"rouge1_max,none": 2.2721266317542645,
						"rouge1_max_stderr,none": 0.22343075535560017,
						"rouge2_acc,none": 0.0,
						"rouge2_acc_stderr,none": 0.0,
						"rouge2_diff,none": 0.0,
						"rouge2_diff_stderr,none": 0.0,
						"rouge2_max,none": 0.0,
						"rouge2_max_stderr,none": 0.0,
						"rougeL_acc,none": 0.07588739290085679,
						"rougeL_acc_stderr,none": 0.009270479217707212,
						"rougeL_diff,none": -0.1040161327348345,
						"rougeL_diff_stderr,none": 0.21417941840020896,
						"rougeL_max,none": 2.2652744193409764,
						"rougeL_max_stderr,none": 0.22342083854607495
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22643818849449204,
						"acc_stderr,none": 0.014651337324602574,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3420214636702586,
						"acc_stderr,none": 0.013564000640181784,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.6235201262825573,
						"acc_stderr,none": 0.013616931960667183,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "RWKV/rwkv-4-world-7b"
	},
	"RWKV/rwkv-5-world-1b5": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5112739571589628,
						"acc_norm,none": 0.49239007891770004,
						"acc_norm_stderr,none": 0.07714758965234145,
						"acc_stderr,none": 0.10622886770015459,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3446875,
						"acc_stderr,none": 0.016201421596492432,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00465,
						"acc_stderr,none": 0.0047658410308941065,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.15149236838150676,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2384843982169391,
						"acc_norm,none": 0.2384843982169391,
						"acc_norm_stderr,none": 0.11188649523220871,
						"acc_stderr,none": 0.11188649523220871,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.24969780694180624,
						"acc_norm,none": 0.24969780694180624,
						"acc_norm_stderr,none": 0.03784722376131588,
						"acc_stderr,none": 0.03784722376131588,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.327491428145498,
						"likelihood_diff_stderr,none": 0.4719965096495165,
						"pct_stereotype,none": 0.5757304710793082,
						"pct_stereotype_stderr,none": 0.08297235491951933
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.011318897637795276,
						"exact_match_stderr,none": 0.0023473357928725683
					},
					"glue": {
						"acc,none": 0.5410165555026203,
						"acc_stderr,none": 0.012289708247379585,
						"alias": "glue",
						"f1,none": 0.3991229231036883,
						"f1_stderr,none": 0.00018823773677900912,
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"kmmlu": {
						"acc,none": 0.22702858792954086,
						"acc_norm,none": 0.22702858792954086,
						"acc_norm_stderr,none": 0.02523919947210493,
						"acc_stderr,none": 0.02523919947210493,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4992326244244683,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.000499847695390778,
						"acc_stderr,none": 0.04277047911125522,
						"alias": "kobest",
						"f1,none": 0.40688220803368735,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6089656510770425,
						"acc_stderr,none": 0.02481100651285048,
						"alias": "lambada",
						"perplexity,none": 6.3688258431378095,
						"perplexity_stderr,none": 0.6778064675853046
					},
					"lambada_cloze": {
						"acc,none": 0.014942751795070833,
						"acc_stderr,none": 0.0017875881094304741,
						"alias": "lambada_cloze",
						"perplexity,none": 900.9697152919758,
						"perplexity_stderr,none": 149.69210596265262
					},
					"lambada_multilingual": {
						"acc,none": 0.4484766155637493,
						"acc_stderr,none": 0.0830249431644644,
						"alias": "lambada_multilingual",
						"perplexity,none": 43.18680498264333,
						"perplexity_stderr,none": 16.58118499444968
					},
					"mmlu": {
						"acc,none": 0.2525993448226748,
						"acc_stderr,none": 0.04202282990456397,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24017003188097769,
						"acc_stderr,none": 0.02857393482131495,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25683939491470875,
						"acc_stderr,none": 0.05743915320464653,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26454338641533964,
						"acc_stderr,none": 0.034586953407146494,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.25531240088804313,
						"acc_stderr,none": 0.04558330291190535,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2895670688431512,
						"acc_norm,none": 0.2580687091661631,
						"acc_norm_stderr,none": 0.00010889608589196277,
						"acc_stderr,none": 0.08132316590829301,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5192857142857142,
						"acc_stderr,none": 0.03007690925789229,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7164605671706539,
						"acc_norm,none": 0.4995181848102748,
						"acc_norm_stderr,none": 0.008337220905567284,
						"acc_stderr,none": 0.14863316206902988,
						"alias": "pythia",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362,
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32269503546099293,
						"acc_norm,none": 0.375886524822695,
						"acc_norm_stderr,none": 0.048382895443179384,
						"acc_stderr,none": 0.04948081995880469,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5212804898339489,
						"acc_stderr,none": 0.017321707777982396,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.30632046737222796,
						"acc_stderr,none": 0.0018482852028236226,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960498,
						"bleu_diff,none": -5.354581925951029,
						"bleu_diff_stderr,none": 0.7490692251224637,
						"bleu_max,none": 22.575556120505805,
						"bleu_max_stderr,none": 0.7430338013356429,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -7.446877850094821,
						"rouge1_diff_stderr,none": 0.8648739296481254,
						"rouge1_max,none": 46.34984627747259,
						"rouge1_max_stderr,none": 0.8722417823976386,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731615,
						"rouge2_diff,none": -9.272747045563198,
						"rouge2_diff_stderr,none": 0.9969547067710065,
						"rouge2_max,none": 29.61475975659187,
						"rouge2_max_stderr,none": 0.9794499444707578,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -7.629525541962505,
						"rougeL_diff_stderr,none": 0.8694041166189354,
						"rougeL_max,none": 43.67019975982433,
						"rougeL_max_stderr,none": 0.8827245562177154
					},
					"xcopa": {
						"acc,none": 0.5789090909090909,
						"acc_stderr,none": 0.04463456332011875,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4044979919678715,
						"acc_stderr,none": 0.04620022346504284,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5785452138860477,
						"acc_stderr,none": 0.05501404774447183,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7302764666217127,
						"acc_stderr,none": 0.044406120682486394,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5112739571589628,
						"acc_norm,none": 0.49239007891770004,
						"acc_norm_stderr,none": 0.07714758965234145,
						"acc_stderr,none": 0.10622886770015459,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3446875,
						"acc_stderr,none": 0.016201421596492432,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.01516792886540756,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456727,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311012,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.28668941979522183,
						"acc_norm,none": 0.3302047781569966,
						"acc_norm_stderr,none": 0.013743085603760427,
						"acc_stderr,none": 0.013214986329274779,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6220538720538721,
						"acc_norm,none": 0.5723905723905723,
						"acc_norm_stderr,none": 0.010151683397430682,
						"acc_stderr,none": 0.009949405744045459,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00465,
						"acc_stderr,none": 0.0047658410308941065,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430695038,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.022,
						"acc_stderr,none": 0.0032807593162018913,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.011,
						"acc_stderr,none": 0.002332856855993376,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339458,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.009,
						"acc_stderr,none": 0.002112280962711327,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339423,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0008676789587852494,
						"acc_stderr,none": 0.000613408514134382,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.15149236838150676,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662727,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.00223158687484488,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707366,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996695,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262026,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234195,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475294,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557816,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142644,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426109,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165545,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557425,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333454,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910637,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696844,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024398,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707377,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904635,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559552,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767667,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.274,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559926,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.014526080235459548,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937823,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942307,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697589,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.01575792855397917,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665546,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477341,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.015663503610155283,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.01496596071022448,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410037,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662734,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727191,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734976,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243775,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248123,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695456,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866447,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030127,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.01351231225892086,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.015740004693383845,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475282,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.013772206565168543,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905687,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946097,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724454,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515441,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163044,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745902,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656799,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389627,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.015782683329937628,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.397,
						"acc_stderr,none": 0.015480007449307989,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5140672782874618,
						"acc_stderr,none": 0.0087415932027706,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.06297362289056342,
						"alias": "cb",
						"f1,none": 0.22987012987012986,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2384843982169391,
						"acc_norm,none": 0.2384843982169391,
						"acc_norm_stderr,none": 0.11188649523220871,
						"acc_stderr,none": 0.11188649523220871,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764435,
						"acc_stderr,none": 0.09361833424764435,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.0687296045180637,
						"acc_stderr,none": 0.0687296045180637,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.06273323266748675,
						"acc_stderr,none": 0.06273323266748675,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777508,
						"acc_stderr,none": 0.06120537406777508,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549507,
						"acc_stderr,none": 0.09085862440549507,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.053348255582850765,
						"acc_stderr,none": 0.053348255582850765,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.07102933373079214,
						"acc_stderr,none": 0.07102933373079214,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.06358669845936323,
						"acc_stderr,none": 0.06358669845936323,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.24969780694180624,
						"acc_norm,none": 0.24969780694180624,
						"acc_norm_stderr,none": 0.03784722376131588,
						"acc_stderr,none": 0.03784722376131588,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.21893491124260356,
						"acc_norm,none": 0.21893491124260356,
						"acc_norm_stderr,none": 0.03190409884491232,
						"acc_stderr,none": 0.03190409884491232,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.033464098810559534,
						"acc_stderr,none": 0.033464098810559534,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22009569377990432,
						"acc_norm,none": 0.22009569377990432,
						"acc_norm_stderr,none": 0.028727297002576892,
						"acc_stderr,none": 0.028727297002576892,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847837,
						"acc_stderr,none": 0.03915345408847837,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2867647058823529,
						"acc_norm,none": 0.2867647058823529,
						"acc_norm_stderr,none": 0.038923544178637824,
						"acc_stderr,none": 0.038923544178637824,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.04109984842463997,
						"acc_stderr,none": 0.04109984842463997,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24458204334365324,
						"acc_norm,none": 0.24458204334365324,
						"acc_norm_stderr,none": 0.023953997540932172,
						"acc_stderr,none": 0.023953997540932172,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2122905027932961,
						"acc_norm,none": 0.2122905027932961,
						"acc_norm_stderr,none": 0.030650553564393286,
						"acc_stderr,none": 0.030650553564393286,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2564102564102564,
						"acc_norm,none": 0.2564102564102564,
						"acc_norm_stderr,none": 0.02647585170669971,
						"acc_stderr,none": 0.02647585170669971,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.28654970760233917,
						"acc_norm,none": 0.28654970760233917,
						"acc_norm_stderr,none": 0.03467826685703826,
						"acc_stderr,none": 0.03467826685703826,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2158273381294964,
						"acc_norm,none": 0.2158273381294964,
						"acc_norm_stderr,none": 0.03502027344986235,
						"acc_stderr,none": 0.03502027344986235,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.03452055811164904,
						"acc_stderr,none": 0.03452055811164904,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02688368747322085,
						"acc_stderr,none": 0.02688368747322085,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23232323232323232,
						"acc_norm,none": 0.23232323232323232,
						"acc_norm_stderr,none": 0.030088629490217483,
						"acc_stderr,none": 0.030088629490217483,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998167,
						"acc_stderr,none": 0.028942004040998167,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.029017133559381268,
						"acc_stderr,none": 0.029017133559381268,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623559,
						"acc_stderr,none": 0.03706860462623559,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.03484731504650188,
						"acc_stderr,none": 0.03484731504650188,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268948,
						"acc_stderr,none": 0.03279317792268948,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053242,
						"acc_stderr,none": 0.03535681229053242,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.03809523809523811,
						"acc_stderr,none": 0.03809523809523811,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581516,
						"acc_stderr,none": 0.03186439492581516,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.23255813953488372,
						"acc_norm,none": 0.23255813953488372,
						"acc_norm_stderr,none": 0.0323065408320345,
						"acc_stderr,none": 0.0323065408320345,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.029576535293164476,
						"acc_stderr,none": 0.029576535293164476,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.037474208760847595,
						"acc_stderr,none": 0.037474208760847595,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.03914731903595733,
						"acc_stderr,none": 0.03914731903595733,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.02966137041396584,
						"acc_stderr,none": 0.02966137041396584,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736876,
						"acc_stderr,none": 0.03305282343736876,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03196107138009966,
						"acc_stderr,none": 0.03196107138009966,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.039406691683376995,
						"acc_stderr,none": 0.039406691683376995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438014,
						"acc_stderr,none": 0.03780019230438014,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.039906571509931855,
						"acc_stderr,none": 0.039906571509931855,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.03183348654463748,
						"acc_stderr,none": 0.03183348654463748,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776726,
						"acc_stderr,none": 0.03011304016776726,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23670212765957446,
						"acc_norm,none": 0.23670212765957446,
						"acc_norm_stderr,none": 0.021949896304751585,
						"acc_stderr,none": 0.021949896304751585,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.02780436020996173,
						"acc_stderr,none": 0.02780436020996173,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.02895216745089081,
						"acc_stderr,none": 0.02895216745089081,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245473,
						"acc_stderr,none": 0.03244189290245473,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"copa": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.327491428145498,
						"likelihood_diff_stderr,none": 0.4719965096495165,
						"pct_stereotype,none": 0.5757304710793082,
						"pct_stereotype_stderr,none": 0.08297235491951933
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.463700059630292,
						"likelihood_diff_stderr,none": 0.0845631693594068,
						"pct_stereotype,none": 0.6064400715563506,
						"pct_stereotype_stderr,none": 0.01193334989005588
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.7637362637362637,
						"likelihood_diff_stderr,none": 0.42775738842728434,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.8522727272727275,
						"likelihood_diff_stderr,none": 2.0415303265884064,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.1826923076923075,
						"likelihood_diff_stderr,none": 0.6039799199584519,
						"pct_stereotype,none": 0.6615384615384615,
						"pct_stereotype_stderr,none": 0.059148294227806535
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.4078125,
						"likelihood_diff_stderr,none": 0.15069166775212786,
						"pct_stereotype,none": 0.634375,
						"pct_stereotype_stderr,none": 0.026964702306061943
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2453703703703702,
						"likelihood_diff_stderr,none": 0.2217040933399034,
						"pct_stereotype,none": 0.5231481481481481,
						"pct_stereotype_stderr,none": 0.03406315360711507
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.4322916666666665,
						"likelihood_diff_stderr,none": 0.3002279053708052,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3390748031496065,
						"likelihood_diff_stderr,none": 0.14670385441739597,
						"pct_stereotype,none": 0.48031496062992124,
						"pct_stereotype_stderr,none": 0.022188563396746394
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6779279279279278,
						"likelihood_diff_stderr,none": 0.3691446688239786,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.0409074307386092
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.397849462365591,
						"likelihood_diff_stderr,none": 0.4110965322159284,
						"pct_stereotype,none": 0.8494623655913979,
						"pct_stereotype_stderr,none": 0.03728212869390004
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.029605263157895,
						"likelihood_diff_stderr,none": 0.22679129119776745,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1943947525342873,
						"likelihood_diff_stderr,none": 0.07716304366492163,
						"pct_stereotype,none": 0.5456171735241503,
						"pct_stereotype_stderr,none": 0.012162363046239631
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.2083333333333335,
						"likelihood_diff_stderr,none": 0.2905677532452071,
						"pct_stereotype,none": 0.5,
						"pct_stereotype_stderr,none": 0.052999894000318
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.826923076923077,
						"likelihood_diff_stderr,none": 0.8341588613190896,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.768939393939394,
						"likelihood_diff_stderr,none": 0.4243914677419997,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.0584705346204686
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.7955607476635516,
						"likelihood_diff_stderr,none": 0.14919853541464037,
						"pct_stereotype,none": 0.5077881619937694,
						"pct_stereotype_stderr,none": 0.02794745876935634
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.219367588932806,
						"likelihood_diff_stderr,none": 0.195350459451411,
						"pct_stereotype,none": 0.34782608695652173,
						"pct_stereotype_stderr,none": 0.030002850406189333
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.361111111111111,
						"likelihood_diff_stderr,none": 0.4441876214153266,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8440217391304348,
						"likelihood_diff_stderr,none": 0.14553636283089622,
						"pct_stereotype,none": 0.48695652173913045,
						"pct_stereotype_stderr,none": 0.023330058952084724
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 2.876086956521739,
						"likelihood_diff_stderr,none": 0.2574106436134579,
						"pct_stereotype,none": 0.7478260869565218,
						"pct_stereotype_stderr,none": 0.04067222754154718
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.4134615384615383,
						"likelihood_diff_stderr,none": 0.270854058855428,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.04108446855035881
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.0404974489795915,
						"likelihood_diff_stderr,none": 0.28319103682359836,
						"pct_stereotype,none": 0.6836734693877551,
						"pct_stereotype_stderr,none": 0.033302348931020055
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.011318897637795276,
						"exact_match_stderr,none": 0.0023473357928725683
					},
					"glue": {
						"acc,none": 0.5410165555026203,
						"acc_stderr,none": 0.012289708247379585,
						"alias": "glue",
						"f1,none": 0.3991229231036883,
						"f1_stderr,none": 0.00018823773677900912,
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0075815011372251705,
						"exact_match_stderr,get-answer": 0.0023892815120772543
					},
					"hellaswag": {
						"acc,none": 0.42471619199362676,
						"acc_norm,none": 0.5502887870942044,
						"acc_norm_stderr,none": 0.004964479324552535,
						"acc_stderr,none": 0.004932896472460566,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.22702858792954086,
						"acc_norm,none": 0.22702858792954086,
						"acc_norm_stderr,none": 0.02523919947210493,
						"acc_stderr,none": 0.02523919947210493,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314121,
						"acc_stderr,none": 0.013644675781314121,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.01327374070080447,
						"acc_stderr,none": 0.01327374070080447,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.01356964019917745,
						"acc_stderr,none": 0.01356964019917745,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.01732412216192008,
						"acc_stderr,none": 0.01732412216192008,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.01188449583454166,
						"acc_stderr,none": 0.01188449583454166,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968121,
						"acc_stderr,none": 0.012583693787968121,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.028085923439997284,
						"acc_stderr,none": 0.028085923439997284,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.013314551335935936,
						"acc_stderr,none": 0.013314551335935936,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.03939825345266469,
						"acc_stderr,none": 0.03939825345266469,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247118,
						"acc_stderr,none": 0.013414729030247118,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364462,
						"acc_stderr,none": 0.013190830072364462,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074137,
						"acc_stderr,none": 0.012336254828074137,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.01347358666196723,
						"acc_stderr,none": 0.01347358666196723,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555968,
						"acc_stderr,none": 0.013550631705555968,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.01360735683959812,
						"acc_stderr,none": 0.01360735683959812,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031825,
						"acc_stderr,none": 0.012997843819031825,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096926,
						"acc_stderr,none": 0.012841374572096926,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.01284137457209692,
						"acc_stderr,none": 0.01284137457209692,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031832,
						"acc_stderr,none": 0.012997843819031832,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660009,
						"acc_stderr,none": 0.013394902889660009,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763933,
						"acc_stderr,none": 0.013253174964763933,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.01745014362464865,
						"acc_stderr,none": 0.01745014362464865,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613606,
						"acc_stderr,none": 0.013294199326613606,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438702,
						"acc_stderr,none": 0.013434451402438702,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.176,
						"acc_norm,none": 0.176,
						"acc_norm_stderr,none": 0.012048616898597507,
						"acc_stderr,none": 0.012048616898597507,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613614,
						"acc_stderr,none": 0.013294199326613614,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.0244599795235114,
						"acc_stderr,none": 0.0244599795235114,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.01396316475480995,
						"acc_stderr,none": 0.01396316475480995,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595306,
						"acc_stderr,none": 0.013063179040595306,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.226,
						"acc_norm,none": 0.226,
						"acc_norm_stderr,none": 0.013232501619085334,
						"acc_stderr,none": 0.013232501619085334,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.213,
						"acc_norm,none": 0.213,
						"acc_norm_stderr,none": 0.01295371756673723,
						"acc_stderr,none": 0.01295371756673723,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177438,
						"acc_stderr,none": 0.013569640199177438,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.028617649261360185,
						"acc_stderr,none": 0.028617649261360185,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937591,
						"acc_stderr,none": 0.013493000446937591,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4992326244244683,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.000499847695390778,
						"acc_stderr,none": 0.04277047911125522,
						"alias": "kobest",
						"f1,none": 0.40688220803368735,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5142450142450142,
						"acc_stderr,none": 0.013343348923385135,
						"alias": " - kobest_boolq",
						"f1,none": 0.39937524306557437,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.01571976816340209,
						"alias": " - kobest_copa",
						"f1,none": 0.5552884615384615,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.364,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.0223572738810164,
						"acc_stderr,none": 0.021539170637317695,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3617369279761672,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5088161209068011,
						"acc_stderr,none": 0.025122039300513738,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3668261562998405,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6089656510770425,
						"acc_stderr,none": 0.02481100651285048,
						"alias": "lambada",
						"perplexity,none": 6.3688258431378095,
						"perplexity_stderr,none": 0.6778064675853046
					},
					"lambada_cloze": {
						"acc,none": 0.014942751795070833,
						"acc_stderr,none": 0.0017875881094304741,
						"alias": "lambada_cloze",
						"perplexity,none": 900.9697152919758,
						"perplexity_stderr,none": 149.69210596265262
					},
					"lambada_multilingual": {
						"acc,none": 0.4484766155637493,
						"acc_stderr,none": 0.0830249431644644,
						"alias": "lambada_multilingual",
						"perplexity,none": 43.18680498264333,
						"perplexity_stderr,none": 16.58118499444968
					},
					"lambada_openai": {
						"acc,none": 0.6568988938482437,
						"acc_stderr,none": 0.00661412498246103,
						"alias": " - lambada_openai",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.016107122064816612,
						"acc_stderr,none": 0.0017538601328517046,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 608.2435963507144,
						"perplexity_stderr,none": 22.37989558879136
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.35066951290510384,
						"acc_stderr,none": 0.006648045374603887,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 65.82972989107675,
						"perplexity_stderr,none": 3.9571956126281833
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6567048321366195,
						"acc_stderr,none": 0.00661501790443367,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.056405351554518,
						"perplexity_stderr,none": 0.11860916891457675
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.37104599262565496,
						"acc_stderr,none": 0.006730314981342215,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 61.249035187327245,
						"perplexity_stderr,none": 3.3251943349532094
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.44944692412187076,
						"acc_stderr,none": 0.006930281504471643,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 34.89400012412681,
						"perplexity_stderr,none": 1.8764986780815518
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4145158160294974,
						"acc_stderr,none": 0.006863414211397148,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 48.90485435913133,
						"perplexity_stderr,none": 2.8348284694345787
					},
					"lambada_standard": {
						"acc,none": 0.5612264700174655,
						"acc_stderr,none": 0.006913553944132544,
						"alias": " - lambada_standard",
						"perplexity,none": 7.681025721290132,
						"perplexity_stderr,none": 0.20919094987359504
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.013778381525325054,
						"acc_stderr,none": 0.0016240464072475183,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1193.6958342332373,
						"perplexity_stderr,none": 38.29277175867896
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23918575063613232,
						"exact_match_stderr,get-answer": 0.010762641593043935
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.01788624973410439,
						"acc_stderr,none": 0.016887410894296944,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23982188295165394,
						"acc_norm,none": 0.2856234096692112,
						"acc_norm_stderr,none": 0.011396524130843131,
						"acc_stderr,none": 0.010772437759520095,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23819095477386934,
						"acc_norm,none": 0.2539363484087102,
						"acc_norm_stderr,none": 0.007968030108429293,
						"acc_stderr,none": 0.007798054851247481,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4451387417919932,
						"acc_stderr,none": 0.005114826414232715,
						"alias": "mc_taco",
						"f1,none": 0.49697551608257323,
						"f1_stderr,none": 0.006011009960072317
					},
					"medmcqa": {
						"acc,none": 0.26918479560124314,
						"acc_norm,none": 0.26918479560124314,
						"acc_norm_stderr,none": 0.006858624646857021,
						"acc_stderr,none": 0.006858624646857021,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.23880597014925373,
						"acc_norm,none": 0.23880597014925373,
						"acc_norm_stderr,none": 0.011954370755725674,
						"acc_stderr,none": 0.011954370755725674,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2525993448226748,
						"acc_stderr,none": 0.04202282990456397,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.03633384414073463,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.32075471698113206,
						"acc_stderr,none": 0.028727502957880263,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03745554791462457,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3236994219653179,
						"acc_stderr,none": 0.03567603799639171,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322674,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.18723404255319148,
						"acc_stderr,none": 0.025501588341883607,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.023517294335963276,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.040406101782088394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2903225806451613,
						"acc_stderr,none": 0.025822106119415895,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.22167487684729065,
						"acc_stderr,none": 0.029225575892489614,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23030303030303031,
						"acc_stderr,none": 0.03287666758603489,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03358618145732524,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.27461139896373055,
						"acc_stderr,none": 0.032210245080411544,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.258974358974359,
						"acc_stderr,none": 0.022211106810061665,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.02646611753895991,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.028510251512341937,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27155963302752295,
						"acc_stderr,none": 0.019069098363191445,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.029886910547626964,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.031493281045079556,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2489451476793249,
						"acc_stderr,none": 0.028146970599422644,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.16143497757847533,
						"acc_stderr,none": 0.024693957899128472,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24017003188097769,
						"acc_stderr,none": 0.02857393482131495,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.17355371900826447,
						"acc_stderr,none": 0.0345727283691767,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04186091791394607,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.19642857142857142,
						"acc_stderr,none": 0.03770970049347019,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.39805825242718446,
						"acc_stderr,none": 0.04846748253977239,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2094017094017094,
						"acc_stderr,none": 0.026655699653922754,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.22349936143039592,
						"acc_stderr,none": 0.014897235229450707,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.21098265895953758,
						"acc_stderr,none": 0.021966309947043124,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303679,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30718954248366015,
						"acc_stderr,none": 0.026415601914388992,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25683939491470875,
						"acc_stderr,none": 0.05743915320464653,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2540192926045016,
						"acc_stderr,none": 0.02472386150477169,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.023132376234543346,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290396,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24967405475880053,
						"acc_stderr,none": 0.011054538377832327,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.25735294117647056,
						"acc_stderr,none": 0.026556519470041524,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.017322789207784326,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.24545454545454545,
						"acc_stderr,none": 0.041220665028782834,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2612244897959184,
						"acc_stderr,none": 0.028123429335142787,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26454338641533964,
						"acc_stderr,none": 0.034586953407146494,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3034825870646766,
						"acc_stderr,none": 0.03251006816458618,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.25531240088804313,
						"acc_stderr,none": 0.04558330291190535,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.21686746987951808,
						"acc_stderr,none": 0.03208284450356365,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.19883040935672514,
						"acc_stderr,none": 0.03061111655743253,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.349872643912379,
						"acc_stderr,none": 0.004814278243995993,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.34662327095199347,
						"acc_stderr,none": 0.004799675113044456,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.36519607843137253,
						"acc_stderr,none": 0.023866330396788003,
						"alias": "mrpc",
						"f1,none": 0.24489795918367346,
						"f1_stderr,none": 0.03069881419470773
					},
					"multimedqa": {
						"acc,none": 0.2895670688431512,
						"acc_norm,none": 0.2580687091661631,
						"acc_norm_stderr,none": 0.00010889608589196277,
						"acc_stderr,none": 0.08132316590829301,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.43028052805280526,
						"acc_stderr,none": 0.00711164170549595,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6797404081032992,
						"mrr_stderr,none": 0.010364448745151772,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4221218961625282,
						"r@2_stderr,none": 0.016602191705517567
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6337471802468763,
						"mrr_stderr,none": 0.010405616118412488,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403396
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.00221606648199446,
						"exact_match_stderr,remove_whitespace": 0.0007827376152844447
					},
					"openbookqa": {
						"acc,none": 0.254,
						"acc_norm,none": 0.354,
						"acc_norm_stderr,none": 0.021407582047916447,
						"acc_stderr,none": 0.01948659680164338,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4855,
						"acc_stderr,none": 0.011178432523249468,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4545,
						"acc_stderr,none": 0.011136735987003724,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.011158752568250671,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630758,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.011110230358066702,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5205,
						"acc_stderr,none": 0.011173732641806813,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5192857142857142,
						"acc_stderr,none": 0.03007690925789229,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7110990206746464,
						"acc_norm,none": 0.7132752992383025,
						"acc_norm_stderr,none": 0.010551314503108066,
						"acc_stderr,none": 0.010575111841364905,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.21824295473953886,
						"acc_norm,none": 0.26147523484201535,
						"acc_norm_stderr,none": 0.003210487355255146,
						"acc_stderr,none": 0.003017721587333072,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.021966635293832918,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7164605671706539,
						"acc_norm,none": 0.4995181848102748,
						"acc_norm_stderr,none": 0.008337220905567284,
						"acc_stderr,none": 0.14863316206902988,
						"alias": "pythia",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362,
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32269503546099293,
						"acc_norm,none": 0.375886524822695,
						"acc_norm_stderr,none": 0.048382895443179384,
						"acc_stderr,none": 0.04948081995880469,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4083333333333333,
						"acc_norm,none": 0.49166666666666664,
						"acc_norm_stderr,none": 0.045828558447483604,
						"acc_stderr,none": 0.045058059858031296,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03343758265727744,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3380281690140845,
						"acc_norm,none": 0.3415492957746479,
						"acc_norm_stderr,none": 0.028190002383528694,
						"acc_stderr,none": 0.028119201465363827,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5052169137836353,
						"acc_stderr,none": 0.006765042284363289,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6368538214197378,
						"acc_stderr,none": 0.0023917408386164144,
						"alias": "qqp",
						"f1,none": 0.40039206076941924,
						"f1_stderr,none": 0.003951730640353965
					},
					"race": {
						"acc,none": 0.3397129186602871,
						"acc_stderr,none": 0.0146579144325864,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.254,
						"em_stderr,none": 0.004353193658626019,
						"f1,none": 0.26163523828089236,
						"f1_stderr,none": 0.004364439540718011
					},
					"rte": {
						"acc,none": 0.51985559566787,
						"acc_stderr,none": 0.030072723167317184,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.897,
						"acc_norm,none": 0.853,
						"acc_norm_stderr,none": 0.011203415395160333,
						"acc_stderr,none": 0.009616833339695794,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.51985559566787,
						"acc_stderr,none": 0.030072723167317184,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7545871559633027,
						"acc_stderr,none": 0.014581233713232339,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.528741377586724,
						"acc_norm,none": 0.7191342597220833,
						"acc_norm_stderr,none": 0.0031774966073351524,
						"acc_stderr,none": 0.0035292467702086514,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5212804898339489,
						"acc_stderr,none": 0.017321707777982396,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5005008012820513,
						"acc_stderr,none": 0.005004252916283736,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5644066078848687,
						"acc_stderr,none": 0.004991902919308554,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.4999019607843137,
						"acc_stderr,none": 0.0049509803207759065,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"triviaqa": {
						"alias": "triviaqa",
						"exact_match,remove_whitespace": 0.006966116807846634,
						"exact_match_stderr,remove_whitespace": 0.0006209117540907837
					},
					"truthfulqa": {
						"acc,none": 0.30632046737222796,
						"acc_stderr,none": 0.0018482852028236226,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960498,
						"bleu_diff,none": -5.354581925951029,
						"bleu_diff_stderr,none": 0.7490692251224637,
						"bleu_max,none": 22.575556120505805,
						"bleu_max_stderr,none": 0.7430338013356429,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -7.446877850094821,
						"rouge1_diff_stderr,none": 0.8648739296481254,
						"rouge1_max,none": 46.34984627747259,
						"rouge1_max_stderr,none": 0.8722417823976386,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731615,
						"rouge2_diff,none": -9.272747045563198,
						"rouge2_diff_stderr,none": 0.9969547067710065,
						"rouge2_max,none": 29.61475975659187,
						"rouge2_max_stderr,none": 0.9794499444707578,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -7.629525541962505,
						"rougeL_diff_stderr,none": 0.8694041166189354,
						"rougeL_max,none": 43.67019975982433,
						"rougeL_max_stderr,none": 0.8827245562177154
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31946144430844553,
						"bleu_acc_stderr,none": 0.016322644182960498,
						"bleu_diff,none": -5.354581925951029,
						"bleu_diff_stderr,none": 0.7490692251224637,
						"bleu_max,none": 22.575556120505805,
						"bleu_max_stderr,none": 0.7430338013356429,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -7.446877850094821,
						"rouge1_diff_stderr,none": 0.8648739296481254,
						"rouge1_max,none": 46.34984627747259,
						"rouge1_max_stderr,none": 0.8722417823976386,
						"rouge2_acc,none": 0.2350061199510404,
						"rouge2_acc_stderr,none": 0.014843061507731615,
						"rouge2_diff,none": -9.272747045563198,
						"rouge2_diff_stderr,none": 0.9969547067710065,
						"rouge2_max,none": 29.61475975659187,
						"rouge2_max_stderr,none": 0.9794499444707578,
						"rougeL_acc,none": 0.28518971848225216,
						"rougeL_acc_stderr,none": 0.015805827874454892,
						"rougeL_diff,none": -7.629525541962505,
						"rougeL_diff_stderr,none": 0.8694041166189354,
						"rougeL_max,none": 43.67019975982433,
						"rougeL_max_stderr,none": 0.8827245562177154
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2252141982864137,
						"acc_stderr,none": 0.014623240768023515,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38746698163957854,
						"acc_stderr,none": 0.013851963967947878,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.011318897637795276,
						"exact_match_stderr,none": 0.0023473357928725683
					},
					"wic": {
						"acc,none": 0.4952978056426332,
						"acc_stderr,none": 0.01980984521925977,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5911602209944752,
						"acc_stderr,none": 0.01381695429513568,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6057692307692307,
						"acc_stderr,none": 0.04815154775990711,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7765567765567766,
						"acc_stderr,none": 0.025257231735255518,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5789090909090909,
						"acc_stderr,none": 0.04463456332011875,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317695,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.0215136625275824,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143022,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.021793529219281165,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.0213237286328075,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4044979919678715,
						"acc_stderr,none": 0.04620022346504284,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42610441767068274,
						"acc_stderr,none": 0.009912016377459067,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894263,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.37349397590361444,
						"acc_stderr,none": 0.00969598596221976,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5108433734939759,
						"acc_stderr,none": 0.010019715824483473,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4566265060240964,
						"acc_stderr,none": 0.009984293410840315,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.457429718875502,
						"acc_stderr,none": 0.009985682220227464,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998446,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4493975903614458,
						"acc_stderr,none": 0.009970615649588139,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337356,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38473895582329315,
						"acc_stderr,none": 0.00975214930715253,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.009811284026425582,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206098,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.009924844537285524,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.009512333319470373,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5785452138860477,
						"acc_stderr,none": 0.05501404774447183,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5373924553275976,
						"acc_stderr,none": 0.012831093347016553,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7200529450694904,
						"acc_stderr,none": 0.011553982180012726,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6293845135671741,
						"acc_stderr,none": 0.012428861084065901,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5334215751158173,
						"acc_stderr,none": 0.01283834793473167,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5407015221707479,
						"acc_stderr,none": 0.012824422739625582,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.614824619457313,
						"acc_stderr,none": 0.012523231571141193,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.012865364020375405,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6207809397749835,
						"acc_stderr,none": 0.012486070771171328,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5115817339510258,
						"acc_stderr,none": 0.012863672949335873,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.012743443034698402,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751379,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7302764666217127,
						"acc_stderr,none": 0.044406120682486394,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8047311827956989,
						"acc_stderr,none": 0.008222877134034018,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783093,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6058394160583942,
						"acc_stderr,none": 0.0157881994597223,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6692015209125475,
						"acc_stderr,none": 0.02906762615931534,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.026665559335926015,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.02046343784622378,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/rwkv-5-world-1b5"
	},
	"RWKV/rwkv-5-world-3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5713077790304397,
						"acc_norm,none": 0.5479143179255919,
						"acc_norm_stderr,none": 0.08631239451039338,
						"acc_stderr,none": 0.11036397970520481,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.345625,
						"acc_stderr,none": 0.014725721410778525,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.10815,
						"acc_stderr,none": 0.07701291185860645,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.839134328358209,
						"acc_stderr,none": 0.1371670374792212,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.23402674591381872,
						"acc_norm,none": 0.23402674591381872,
						"acc_norm_stderr,none": 0.11008875472556898,
						"acc_stderr,none": 0.11008875472556898,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2564323950958383,
						"acc_norm,none": 0.2564323950958383,
						"acc_norm_stderr,none": 0.0405388292321595,
						"acc_stderr,none": 0.0405388292321595,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.299949686940966,
						"likelihood_diff_stderr,none": 0.4653467418700576,
						"pct_stereotype,none": 0.5906380441264162,
						"pct_stereotype_stderr,none": 0.07757942010924757
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519664
					},
					"glue": {
						"acc,none": 0.46339733645070746,
						"acc_stderr,none": 0.03557007357551107,
						"alias": "glue",
						"f1,none": 0.5689916758979556,
						"f1_stderr,none": 2.1992423859851198e-05,
						"mcc,none": 0.03192765518850275,
						"mcc_stderr,none": 0.0009384980364050378
					},
					"kmmlu": {
						"acc,none": 0.11969390701703725,
						"acc_norm,none": 0.11969390701703725,
						"acc_norm_stderr,none": 0.057566267190087464,
						"acc_stderr,none": 0.057566267190087464,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5097566323174743,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.0005010020040080159,
						"acc_stderr,none": 0.04330774785880263,
						"alias": "kobest",
						"f1,none": 0.4118726967569513,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.660392004657481,
						"acc_stderr,none": 0.014836345680739133,
						"alias": "lambada",
						"perplexity,none": 4.810776402321361,
						"perplexity_stderr,none": 0.34513479005348574
					},
					"lambada_cloze": {
						"acc,none": 0.055113526101300214,
						"acc_stderr,none": 0.017365122375936747,
						"alias": "lambada_cloze",
						"perplexity,none": 463.8036731449883,
						"perplexity_stderr,none": 100.32850686627049
					},
					"lambada_multilingual": {
						"acc,none": 0.4905880069862216,
						"acc_stderr,none": 0.07500918763308738,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.767081022687634,
						"perplexity_stderr,none": 11.482538353579598
					},
					"mmlu": {
						"acc,none": 0.24711579547073068,
						"acc_stderr,none": 0.03715355989276938,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25313496280552605,
						"acc_stderr,none": 0.03497821947589105,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.24460894753781784,
						"acc_stderr,none": 0.03601804484049794,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03335213570316462,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2496035521725341,
						"acc_stderr,none": 0.04361356583373247,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2901348474095103,
						"acc_norm,none": 0.26761912363317103,
						"acc_norm_stderr,none": 8.571791592033924e-05,
						"acc_stderr,none": 0.09973310621505148,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5155714285714286,
						"acc_stderr,none": 0.019605266516153342,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7245458096462232,
						"acc_norm,none": 0.5535149853752926,
						"acc_norm_stderr,none": 0.009558168490908577,
						"acc_stderr,none": 0.1362978365601511,
						"alias": "pythia",
						"bits_per_byte,none": 0.6806840467125757,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6028995819336924,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.1543807139832785,
						"perplexity_stderr,none": 0.08968729979538739,
						"word_perplexity,none": 12.465979503288589,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.38475177304964536,
						"acc_norm_stderr,none": 0.06301839548681355,
						"acc_stderr,none": 0.04227058306766687,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5218794715650061,
						"acc_stderr,none": 0.017958144871982953,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.29703702849199803,
						"acc_stderr,none": 0.0012828771176254465,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589664,
						"bleu_diff,none": -7.840653357813317,
						"bleu_diff_stderr,none": 0.7754919380348179,
						"bleu_max,none": 24.969060312909836,
						"bleu_max_stderr,none": 0.7557545682762691,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.01586634640138431,
						"rouge1_diff,none": -10.120249669504368,
						"rouge1_diff_stderr,none": 0.8379142333128283,
						"rouge1_max,none": 50.90552409890141,
						"rouge1_max_stderr,none": 0.8232371627604237,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.129225789767089,
						"rouge2_diff_stderr,none": 1.02691682629521,
						"rouge2_max,none": 34.1959988780803,
						"rouge2_max_stderr,none": 0.9629739735311235,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.01574402724825605,
						"rougeL_diff,none": -10.376350888599612,
						"rougeL_diff_stderr,none": 0.842205766667267,
						"rougeL_max,none": 47.81377456968735,
						"rougeL_max_stderr,none": 0.833575150044744
					},
					"xcopa": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.05725225820316545,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4234805890227577,
						"acc_stderr,none": 0.047083828616057824,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5983996149449492,
						"acc_stderr,none": 0.04990323405396006,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7691616093504158,
						"acc_stderr,none": 0.04425483860662812,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5713077790304397,
						"acc_norm,none": 0.5479143179255919,
						"acc_norm_stderr,none": 0.08631239451039338,
						"acc_stderr,none": 0.11036397970520481,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.345625,
						"acc_stderr,none": 0.014725721410778525,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.015060472031706618,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3408333333333333,
						"acc_stderr,none": 0.013688600793296937,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3378839590443686,
						"acc_norm,none": 0.3660409556313993,
						"acc_norm_stderr,none": 0.014077223108470142,
						"acc_stderr,none": 0.013822047922283509,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6864478114478114,
						"acc_norm,none": 0.6376262626262627,
						"acc_norm_stderr,none": 0.009863468202583775,
						"acc_stderr,none": 0.009519779157242258,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.10815,
						"acc_stderr,none": 0.07701291185860645,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0165,
						"acc_stderr,none": 0.002849198828966349,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.1315,
						"acc_stderr,none": 0.007558600480287942,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.249,
						"acc_stderr,none": 0.00967193223386985,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.3195,
						"acc_stderr,none": 0.010429010361897305,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.061,
						"acc_stderr,none": 0.005352926948264491,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.1515,
						"acc_stderr,none": 0.008019103940840797,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.018,
						"acc_stderr,none": 0.002973620892212919,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0585,
						"acc_stderr,none": 0.005249061947211399,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.037,
						"acc_stderr,none": 0.004221896754552657,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.039,
						"acc_stderr,none": 0.004329997048176569,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0013015184381778742,
						"acc_stderr,none": 0.0007511058074590368,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.839134328358209,
						"acc_stderr,none": 0.1371670374792212,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151125,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256562,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448825,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968139,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438671,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008213,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543152,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499344,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767667,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319422,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697593,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919297,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.0061998740663370645,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315134,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178327,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920859,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659992,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160328,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.01087884871433331,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298185,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.015704987954361805,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103315,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370148,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895028,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122361,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315164,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248793,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333334,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.015537226438634597,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181307,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.0155944601441406,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612032,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.01084535023047299,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177925,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525063,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103319,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.01341472903024712,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.0048422564417270565,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469417,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.01132816522334168,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.01579621855130262,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452374,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275286,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968758,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042958,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704163,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632902,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696836,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524306,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.01349300044693759,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.01159890229868901,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.0064889217984274205,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832025,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727081,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.014987482264363933,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6241590214067279,
						"acc_stderr,none": 0.008471147248160116,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.06672848092813058,
						"alias": "cb",
						"f1,none": 0.3782793782793783,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.23402674591381872,
						"acc_norm,none": 0.23402674591381872,
						"acc_norm_stderr,none": 0.11008875472556898,
						"acc_stderr,none": 0.11008875472556898,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.48484848484848486,
						"acc_norm,none": 0.48484848484848486,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915606,
						"acc_stderr,none": 0.06742861107915606,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.06527647182968216,
						"acc_stderr,none": 0.06527647182968216,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777508,
						"acc_stderr,none": 0.06120537406777508,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915692,
						"acc_stderr,none": 0.08742975048915692,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.05650315562208096,
						"acc_stderr,none": 0.05650315562208096,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996391,
						"acc_stderr,none": 0.08081046758996391,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2564323950958383,
						"acc_norm,none": 0.2564323950958383,
						"acc_norm_stderr,none": 0.0405388292321595,
						"acc_stderr,none": 0.0405388292321595,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.28484848484848485,
						"acc_norm,none": 0.28484848484848485,
						"acc_norm_stderr,none": 0.035243908445117836,
						"acc_stderr,none": 0.035243908445117836,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22488038277511962,
						"acc_norm,none": 0.22488038277511962,
						"acc_norm_stderr,none": 0.028948661140327032,
						"acc_stderr,none": 0.028948661140327032,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22137404580152673,
						"acc_norm,none": 0.22137404580152673,
						"acc_norm_stderr,none": 0.036412970813137296,
						"acc_stderr,none": 0.036412970813137296,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22794117647058823,
						"acc_norm,none": 0.22794117647058823,
						"acc_norm_stderr,none": 0.03610519574180446,
						"acc_stderr,none": 0.03610519574180446,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.22429906542056074,
						"acc_norm,none": 0.22429906542056074,
						"acc_norm_stderr,none": 0.04051426427955262,
						"acc_stderr,none": 0.04051426427955262,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.024254090252458053,
						"acc_stderr,none": 0.024254090252458053,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.03251588837184109,
						"acc_stderr,none": 0.03251588837184109,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.029443773022594696,
						"acc_stderr,none": 0.029443773022594696,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.04674660221110773,
						"acc_stderr,none": 0.04674660221110773,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252628,
						"acc_stderr,none": 0.04077494709252628,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153370994,
						"acc_stderr,none": 0.040842473153370994,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.22344322344322345,
						"acc_norm,none": 0.22344322344322345,
						"acc_norm_stderr,none": 0.02525723173525551,
						"acc_stderr,none": 0.02525723173525551,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24019607843137256,
						"acc_norm,none": 0.24019607843137256,
						"acc_norm_stderr,none": 0.02998373305591362,
						"acc_stderr,none": 0.02998373305591362,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.033773102522091945,
						"acc_stderr,none": 0.033773102522091945,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606755,
						"acc_stderr,none": 0.03558926157606755,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.037291986581642324,
						"acc_stderr,none": 0.037291986581642324,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.22012578616352202,
						"acc_norm,none": 0.22012578616352202,
						"acc_norm_stderr,none": 0.03296242821158485,
						"acc_stderr,none": 0.03296242821158485,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.033519538795212696,
						"acc_stderr,none": 0.033519538795212696,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25793650793650796,
						"acc_norm,none": 0.25793650793650796,
						"acc_norm_stderr,none": 0.027614684139414543,
						"acc_stderr,none": 0.027614684139414543,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.029857515673386407,
						"acc_stderr,none": 0.029857515673386407,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2815126050420168,
						"acc_norm,none": 0.2815126050420168,
						"acc_norm_stderr,none": 0.02921354941437216,
						"acc_stderr,none": 0.02921354941437216,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.29130434782608694,
						"acc_norm,none": 0.29130434782608694,
						"acc_norm_stderr,none": 0.0300251804632419,
						"acc_stderr,none": 0.0300251804632419,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174022,
						"acc_stderr,none": 0.03749850709174022,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623559,
						"acc_stderr,none": 0.03706860462623559,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23295454545454544,
						"acc_norm,none": 0.23295454545454544,
						"acc_norm_stderr,none": 0.03195413903050176,
						"acc_stderr,none": 0.03195413903050176,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.26174496644295303,
						"acc_norm,none": 0.26174496644295303,
						"acc_norm_stderr,none": 0.03613362391075455,
						"acc_stderr,none": 0.03613362391075455,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2196969696969697,
						"acc_norm,none": 0.2196969696969697,
						"acc_norm_stderr,none": 0.03617495772540233,
						"acc_stderr,none": 0.03617495772540233,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.039803298549204336,
						"acc_stderr,none": 0.039803298549204336,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.03955932861795833,
						"acc_stderr,none": 0.03955932861795833,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053241,
						"acc_stderr,none": 0.03535681229053241,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235172,
						"acc_stderr,none": 0.03970158273235172,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815186,
						"acc_stderr,none": 0.031864394925815186,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.03384836428157859,
						"acc_stderr,none": 0.03384836428157859,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995065,
						"acc_stderr,none": 0.022050254355995065,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2850467289719626,
						"acc_norm,none": 0.2850467289719626,
						"acc_norm_stderr,none": 0.03093193278921873,
						"acc_stderr,none": 0.03093193278921873,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.22764227642276422,
						"acc_norm,none": 0.22764227642276422,
						"acc_norm_stderr,none": 0.03796258624175263,
						"acc_stderr,none": 0.03796258624175263,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.22950819672131148,
						"acc_norm,none": 0.22950819672131148,
						"acc_norm_stderr,none": 0.03822877895195424,
						"acc_stderr,none": 0.03822877895195424,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2714285714285714,
						"acc_norm,none": 0.2714285714285714,
						"acc_norm_stderr,none": 0.03076030982422604,
						"acc_stderr,none": 0.03076030982422604,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.0346023691873273,
						"acc_stderr,none": 0.0346023691873273,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.03106324157397347,
						"acc_stderr,none": 0.03106324157397347,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649575,
						"acc_stderr,none": 0.04083221538649575,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.03664666337225256,
						"acc_stderr,none": 0.03664666337225256,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2342857142857143,
						"acc_norm,none": 0.2342857142857143,
						"acc_norm_stderr,none": 0.032109360396926204,
						"acc_stderr,none": 0.032109360396926204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2843601895734597,
						"acc_norm,none": 0.2843601895734597,
						"acc_norm_stderr,none": 0.031129489323148667,
						"acc_stderr,none": 0.031129489323148667,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2629310344827586,
						"acc_norm,none": 0.2629310344827586,
						"acc_norm_stderr,none": 0.028964697544540174,
						"acc_stderr,none": 0.028964697544540174,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.03329115112144781,
						"acc_stderr,none": 0.03329115112144781,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800254,
						"acc_stderr,none": 0.03885004245800254,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2610619469026549,
						"acc_norm,none": 0.2610619469026549,
						"acc_norm_stderr,none": 0.029280908211631693,
						"acc_stderr,none": 0.029280908211631693,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.03317505930009182,
						"acc_stderr,none": 0.03317505930009182,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676974,
						"acc_stderr,none": 0.03410167836676974,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.033870869961530825,
						"acc_stderr,none": 0.033870869961530825,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.03192765518850275,
						"mcc_stderr,none": 0.03063491531578042
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.299949686940966,
						"likelihood_diff_stderr,none": 0.4653467418700576,
						"pct_stereotype,none": 0.5906380441264162,
						"pct_stereotype_stderr,none": 0.07757942010924757
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.481216457960644,
						"likelihood_diff_stderr,none": 0.08434438387551527,
						"pct_stereotype,none": 0.6231365533691116,
						"pct_stereotype_stderr,none": 0.011837135379821511
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.5934065934065935,
						"likelihood_diff_stderr,none": 0.3675106493778246,
						"pct_stereotype,none": 0.6703296703296703,
						"pct_stereotype_stderr,none": 0.04955219508596586
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.238636363636363,
						"likelihood_diff_stderr,none": 1.372254960889539,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.276923076923077,
						"likelihood_diff_stderr,none": 0.6553020414530574,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.057692307692307675
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.425,
						"likelihood_diff_stderr,none": 0.16158353969920364,
						"pct_stereotype,none": 0.65,
						"pct_stereotype_stderr,none": 0.026705170739027832
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3778935185185186,
						"likelihood_diff_stderr,none": 0.22474211542176306,
						"pct_stereotype,none": 0.5601851851851852,
						"pct_stereotype_stderr,none": 0.03385177976044811
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.90625,
						"likelihood_diff_stderr,none": 0.33782143954989274,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.251968503937008,
						"likelihood_diff_stderr,none": 0.1480408724708936,
						"pct_stereotype,none": 0.5039370078740157,
						"pct_stereotype_stderr,none": 0.02220509119300217
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.733108108108108,
						"likelihood_diff_stderr,none": 0.3555978807602826,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.0423432136108454
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.044354838709677,
						"likelihood_diff_stderr,none": 0.3440700418507781,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.317763157894737,
						"likelihood_diff_stderr,none": 0.23514147103204017,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1188133571854504,
						"likelihood_diff_stderr,none": 0.07362789524586401,
						"pct_stereotype,none": 0.5581395348837209,
						"pct_stereotype_stderr,none": 0.012130451299814663
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 2.8,
						"likelihood_diff_stderr,none": 0.287407369868418,
						"pct_stereotype,none": 0.4888888888888889,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 1.7307692307692308,
						"likelihood_diff_stderr,none": 0.4250391558711604,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.643939393939394,
						"likelihood_diff_stderr,none": 0.4192366177571236,
						"pct_stereotype,none": 0.7878787878787878,
						"pct_stereotype_stderr,none": 0.05070666827479244
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8457943925233646,
						"likelihood_diff_stderr,none": 0.14499656150014167,
						"pct_stereotype,none": 0.5482866043613707,
						"pct_stereotype_stderr,none": 0.02782020420481579
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.152173913043478,
						"likelihood_diff_stderr,none": 0.17697832486684445,
						"pct_stereotype,none": 0.4308300395256917,
						"pct_stereotype_stderr,none": 0.031194189309843277
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6753472222222223,
						"likelihood_diff_stderr,none": 0.451937743766653,
						"pct_stereotype,none": 0.7083333333333334,
						"pct_stereotype_stderr,none": 0.05394274771736147
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.814673913043478,
						"likelihood_diff_stderr,none": 0.14293642135844017,
						"pct_stereotype,none": 0.4434782608695652,
						"pct_stereotype_stderr,none": 0.023188405797101467
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.266304347826087,
						"likelihood_diff_stderr,none": 0.30978327779712833,
						"pct_stereotype,none": 0.7217391304347827,
						"pct_stereotype_stderr,none": 0.041972396739020965
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.206043956043956,
						"likelihood_diff_stderr,none": 0.2695809563786431,
						"pct_stereotype,none": 0.8461538461538461,
						"pct_stereotype_stderr,none": 0.03803178711331109
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.605548469387755,
						"likelihood_diff_stderr,none": 0.2349380045199283,
						"pct_stereotype,none": 0.6785714285714286,
						"pct_stereotype_stderr,none": 0.033444346798974046
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519664
					},
					"glue": {
						"acc,none": 0.46339733645070746,
						"acc_stderr,none": 0.03557007357551107,
						"alias": "glue",
						"f1,none": 0.5689916758979556,
						"f1_stderr,none": 2.1992423859851198e-05,
						"mcc,none": 0.03192765518850275,
						"mcc_stderr,none": 0.0009384980364050378
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.021986353297952996,
						"exact_match_stderr,get-answer": 0.0040391627581100615
					},
					"hellaswag": {
						"acc,none": 0.47102170882294364,
						"acc_norm,none": 0.6263692491535551,
						"acc_norm_stderr,none": 0.004827786289074841,
						"acc_stderr,none": 0.004981394110706144,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.11969390701703725,
						"acc_norm,none": 0.11969390701703725,
						"acc_norm_stderr,none": 0.057566267190087464,
						"acc_stderr,none": 0.057566267190087464,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.036845294917747094,
						"acc_stderr,none": 0.036845294917747094,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.09,
						"acc_norm,none": 0.09,
						"acc_norm_stderr,none": 0.009054390204866437,
						"acc_stderr,none": 0.009054390204866437,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.116,
						"acc_norm,none": 0.116,
						"acc_norm_stderr,none": 0.010131468138756978,
						"acc_stderr,none": 0.010131468138756978,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968123,
						"acc_stderr,none": 0.012583693787968123,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499353,
						"acc_stderr,none": 0.012864077288499353,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.19666666666666666,
						"acc_norm,none": 0.19666666666666666,
						"acc_norm_stderr,none": 0.016240517402183755,
						"acc_stderr,none": 0.016240517402183755,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.005128089049275289,
						"acc_stderr,none": 0.005128089049275289,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.053,
						"acc_norm,none": 0.053,
						"acc_norm_stderr,none": 0.007088105617246444,
						"acc_stderr,none": 0.007088105617246444,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.042,
						"acc_norm,none": 0.042,
						"acc_norm_stderr,none": 0.006346359293033829,
						"acc_stderr,none": 0.006346359293033829,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.06,
						"acc_norm,none": 0.06,
						"acc_norm_stderr,none": 0.007513751157474911,
						"acc_stderr,none": 0.007513751157474911,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2846153846153846,
						"acc_norm,none": 0.2846153846153846,
						"acc_norm_stderr,none": 0.03972867937362452,
						"acc_stderr,none": 0.03972867937362452,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.054,
						"acc_norm,none": 0.054,
						"acc_norm_stderr,none": 0.007150883521295433,
						"acc_stderr,none": 0.007150883521295433,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.044,
						"acc_norm,none": 0.044,
						"acc_norm_stderr,none": 0.0064889217984274205,
						"acc_stderr,none": 0.0064889217984274205,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.01251081614126438,
						"acc_stderr,none": 0.01251081614126438,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.062,
						"acc_norm,none": 0.062,
						"acc_norm_stderr,none": 0.007629823996280308,
						"acc_stderr,none": 0.007629823996280308,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475286,
						"acc_stderr,none": 0.011358918303475286,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.01110798754893915,
						"acc_stderr,none": 0.01110798754893915,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.106,
						"acc_norm,none": 0.106,
						"acc_norm_stderr,none": 0.00973955126578513,
						"acc_stderr,none": 0.00973955126578513,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.092,
						"acc_norm,none": 0.092,
						"acc_norm_stderr,none": 0.009144376393151094,
						"acc_stderr,none": 0.009144376393151094,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.041,
						"acc_norm,none": 0.041,
						"acc_norm_stderr,none": 0.006273624021118755,
						"acc_stderr,none": 0.006273624021118755,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.06,
						"acc_norm,none": 0.06,
						"acc_norm_stderr,none": 0.007513751157474914,
						"acc_stderr,none": 0.007513751157474914,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333361,
						"acc_stderr,none": 0.008333333333333361,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.099,
						"acc_norm,none": 0.099,
						"acc_norm_stderr,none": 0.009449248027662761,
						"acc_stderr,none": 0.009449248027662761,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.198,
						"acc_norm,none": 0.198,
						"acc_norm_stderr,none": 0.012607733934175297,
						"acc_stderr,none": 0.012607733934175297,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.155,
						"acc_norm,none": 0.155,
						"acc_norm_stderr,none": 0.014787024497482542,
						"acc_stderr,none": 0.014787024497482542,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.12,
						"acc_norm,none": 0.12,
						"acc_norm_stderr,none": 0.010281328012747391,
						"acc_stderr,none": 0.010281328012747391,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.085,
						"acc_norm,none": 0.085,
						"acc_norm_stderr,none": 0.008823426366942293,
						"acc_stderr,none": 0.008823426366942293,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.097,
						"acc_norm,none": 0.097,
						"acc_norm_stderr,none": 0.009363689373248132,
						"acc_stderr,none": 0.009363689373248132,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750638,
						"acc_stderr,none": 0.013626065817750638,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.093,
						"acc_norm,none": 0.093,
						"acc_norm_stderr,none": 0.009188875634996712,
						"acc_stderr,none": 0.009188875634996712,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333316,
						"acc_stderr,none": 0.010878848714333316,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.02752568467055655,
						"acc_stderr,none": 0.02752568467055655,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.145,
						"acc_norm,none": 0.145,
						"acc_norm_stderr,none": 0.011139977517890155,
						"acc_stderr,none": 0.011139977517890155,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475279,
						"acc_stderr,none": 0.011358918303475279,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.062,
						"acc_norm,none": 0.062,
						"acc_norm_stderr,none": 0.007629823996280308,
						"acc_stderr,none": 0.007629823996280308,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5097566323174743,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.0005010020040080159,
						"acc_stderr,none": 0.04330774785880263,
						"alias": "kobest",
						"f1,none": 0.4118726967569513,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5028490028490028,
						"acc_stderr,none": 0.013348550797680823,
						"alias": " - kobest_boolq",
						"f1,none": 0.33586879913255624,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921664,
						"alias": " - kobest_copa",
						"f1,none": 0.5894071038579709,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.404,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.022383074051792257,
						"acc_stderr,none": 0.02196663529383292,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4002693876288701,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5340050377833753,
						"acc_stderr,none": 0.025067769630661905,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5142815573147101,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.660392004657481,
						"acc_stderr,none": 0.014836345680739133,
						"alias": "lambada",
						"perplexity,none": 4.810776402321361,
						"perplexity_stderr,none": 0.34513479005348574
					},
					"lambada_cloze": {
						"acc,none": 0.055113526101300214,
						"acc_stderr,none": 0.017365122375936747,
						"alias": "lambada_cloze",
						"perplexity,none": 463.8036731449883,
						"perplexity_stderr,none": 100.32850686627049
					},
					"lambada_multilingual": {
						"acc,none": 0.4905880069862216,
						"acc_stderr,none": 0.07500918763308738,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.767081022687634,
						"perplexity_stderr,none": 11.482538353579598
					},
					"lambada_openai": {
						"acc,none": 0.6865903357267611,
						"acc_stderr,none": 0.006462746304240013,
						"alias": " - lambada_openai",
						"perplexity,none": 4.1543807139832785,
						"perplexity_stderr,none": 0.08968729979538739
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.020958664855424025,
						"acc_stderr,none": 0.0019956960300098027,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 661.0519806881329,
						"perplexity_stderr,none": 24.311634066214562
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.39705026198331067,
						"acc_stderr,none": 0.006816718684122089,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 47.83970682882908,
						"perplexity_stderr,none": 2.778877570107503
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6867843974383854,
						"acc_stderr,none": 0.006461658130130337,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.156846540122834,
						"perplexity_stderr,none": 0.08993174260208334
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4104405200853872,
						"acc_stderr,none": 0.006853319847090063,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 43.22413654725905,
						"perplexity_stderr,none": 2.242698239017913
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.49408111779545894,
						"acc_stderr,none": 0.0069654895595806015,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 24.659244935805162,
						"perplexity_stderr,none": 1.2820711376035945
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.46458373762856586,
						"acc_stderr,none": 0.006948480669195307,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 33.95547026142204,
						"perplexity_stderr,none": 1.901144450513194
					},
					"lambada_standard": {
						"acc,none": 0.6338055501649524,
						"acc_stderr,none": 0.006711907623691287,
						"alias": " - lambada_standard",
						"perplexity,none": 5.464369106446901,
						"perplexity_stderr,none": 0.1287116959050358
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0892683873471764,
						"acc_stderr,none": 0.003972428813326234,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 266.5553656018438,
						"perplexity_stderr,none": 9.235446930954314
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.25254452926208654,
						"exact_match_stderr,get-answer": 0.010961589961715618
					},
					"logiqa": {
						"acc,none": 0.22119815668202766,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.01760290918682245,
						"acc_stderr,none": 0.016279743532401667,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23091603053435114,
						"acc_norm,none": 0.2818066157760814,
						"acc_norm_stderr,none": 0.011350322458479644,
						"acc_stderr,none": 0.01063226588725422,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24857621440536012,
						"acc_norm,none": 0.24388609715242882,
						"acc_norm_stderr,none": 0.0078611797060005,
						"acc_stderr,none": 0.007911755262023768,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.47945350561321753,
						"acc_stderr,none": 0.005141549406613343,
						"alias": "mc_taco",
						"f1,none": 0.4624302745269605,
						"f1_stderr,none": 0.00648498919416067
					},
					"medmcqa": {
						"acc,none": 0.26535978962467127,
						"acc_norm,none": 0.26535978962467127,
						"acc_norm_stderr,none": 0.0068275185803726365,
						"acc_stderr,none": 0.0068275185803726365,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2663000785545954,
						"acc_norm,none": 0.2663000785545954,
						"acc_norm_stderr,none": 0.012393709943382436,
						"acc_stderr,none": 0.012393709943382436,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24711579547073068,
						"acc_stderr,none": 0.03715355989276938,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.03999262876617722,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2188679245283019,
						"acc_stderr,none": 0.025447863825108618,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.043898699568087785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.027678452578212383,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518753,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.038061426873099935,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.022182037202948368,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.18253968253968253,
						"acc_stderr,none": 0.034550710191021475,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2129032258064516,
						"acc_stderr,none": 0.02328766512726853,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110175,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.03453131801885415,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.25252525252525254,
						"acc_stderr,none": 0.030954055470365907,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.22797927461139897,
						"acc_stderr,none": 0.03027690994517826,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2230769230769231,
						"acc_stderr,none": 0.021107730127243988,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959302,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.19747899159663865,
						"acc_stderr,none": 0.025859164122051456,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23669724770642203,
						"acc_stderr,none": 0.01822407811729907,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.028353212866863448,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2742616033755274,
						"acc_stderr,none": 0.029041333510598028,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.21973094170403587,
						"acc_stderr,none": 0.027790177064383595,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22137404580152673,
						"acc_stderr,none": 0.03641297081313729,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25313496280552605,
						"acc_stderr,none": 0.03497821947589105,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.04320767807536669,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.03957835471980979,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.27607361963190186,
						"acc_stderr,none": 0.03512385283705051,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.20388349514563106,
						"acc_stderr,none": 0.0398913985953177,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.24786324786324787,
						"acc_stderr,none": 0.028286324075564393,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2720306513409962,
						"acc_stderr,none": 0.015913367447500517,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.024685316867257803,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21787709497206703,
						"acc_stderr,none": 0.013806211780732977,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2679738562091503,
						"acc_stderr,none": 0.02536060379624256,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.24460894753781784,
						"acc_stderr,none": 0.03601804484049794,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.26366559485530544,
						"acc_stderr,none": 0.02502553850053234,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2808641975308642,
						"acc_stderr,none": 0.025006469755799197,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24902216427640156,
						"acc_stderr,none": 0.01104489226404077,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.16911764705882354,
						"acc_stderr,none": 0.022770868010113028,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.01824902441120766,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04265792110940589,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19591836734693877,
						"acc_stderr,none": 0.025409301953225678,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03335213570316462,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.24875621890547264,
						"acc_stderr,none": 0.030567675938916714,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2496035521725341,
						"acc_stderr,none": 0.04361356583373247,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.23493975903614459,
						"acc_stderr,none": 0.03300533186128922,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03377310252209196,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.41915435557819664,
						"acc_stderr,none": 0.004980745295494702,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4330756712774613,
						"acc_stderr,none": 0.0049974170342329035,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5245098039215687,
						"acc_stderr,none": 0.024754284840506468,
						"alias": "mrpc",
						"f1,none": 0.5336538461538461,
						"f1_stderr,none": 0.029590060926367703
					},
					"multimedqa": {
						"acc,none": 0.2901348474095103,
						"acc_norm,none": 0.26761912363317103,
						"acc_norm_stderr,none": 8.571791592033924e-05,
						"acc_stderr,none": 0.09973310621505148,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.683972913780277,
						"mrr_stderr,none": 0.010358719761916388,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4187358916478555,
						"r@2_stderr,none": 0.016583844316361184
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6378856300971308,
						"mrr_stderr,none": 0.010342688610105405,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.48081264108352145,
						"r@2_stderr,none": 0.01679493619062732
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.006371191135734072,
						"exact_match_stderr,remove_whitespace": 0.0013244298594293307
					},
					"openbookqa": {
						"acc,none": 0.264,
						"acc_norm,none": 0.368,
						"acc_norm_stderr,none": 0.021588982568353544,
						"acc_stderr,none": 0.019732885585922087,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.011181324206260293,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.499,
						"acc_stderr,none": 0.01118311365477018,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4755,
						"acc_stderr,none": 0.011169702598013184,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.011174185930778315,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.01115725065242577,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5355,
						"acc_stderr,none": 0.011154913314119556,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5155714285714286,
						"acc_stderr,none": 0.019605266516153342,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7431991294885746,
						"acc_norm,none": 0.7328618063112078,
						"acc_norm_stderr,none": 0.010323440492612433,
						"acc_stderr,none": 0.010192864802278061,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2581127241673783,
						"acc_norm,none": 0.2508005977796755,
						"acc_norm_stderr,none": 0.00316691309647289,
						"acc_stderr,none": 0.003197030796465457,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.021049612166134817,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7245458096462232,
						"acc_norm,none": 0.5535149853752926,
						"acc_norm_stderr,none": 0.009558168490908577,
						"acc_stderr,none": 0.1362978365601511,
						"alias": "pythia",
						"bits_per_byte,none": 0.6806840467125757,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6028995819336924,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.1543807139832785,
						"perplexity_stderr,none": 0.08968729979538739,
						"word_perplexity,none": 12.465979503288589,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.38475177304964536,
						"acc_norm_stderr,none": 0.06301839548681355,
						"acc_stderr,none": 0.04227058306766687,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3204225352112676,
						"acc_norm,none": 0.31338028169014087,
						"acc_norm_stderr,none": 0.027574062217983558,
						"acc_stderr,none": 0.027738807894219453,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5041186161449753,
						"acc_stderr,none": 0.006765181024578747,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4660153351471679,
						"acc_stderr,none": 0.002480949915353911,
						"alias": "qqp",
						"f1,none": 0.569210815125212,
						"f1_stderr,none": 0.00264383923589463
					},
					"race": {
						"acc,none": 0.3397129186602871,
						"acc_stderr,none": 0.014657914432586407,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.269,
						"em_stderr,none": 0.004434621357755189,
						"f1,none": 0.27729523830115793,
						"f1_stderr,none": 0.004443013607535462
					},
					"rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.029032524428023707,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.926,
						"acc_norm,none": 0.884,
						"acc_norm_stderr,none": 0.010131468138756974,
						"acc_stderr,none": 0.008282064512704159,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6353790613718412,
						"acc_stderr,none": 0.028972282465132407,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7889908256880734,
						"acc_stderr,none": 0.013825395635819682,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5467359792062382,
						"acc_norm,none": 0.7403778866340098,
						"acc_norm_stderr,none": 0.0030997615151013257,
						"acc_stderr,none": 0.003519615105343039,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5218794715650061,
						"acc_stderr,none": 0.017958144871982953,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.005004255426437999,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5666362622884362,
						"acc_stderr,none": 0.004988935746758798,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.004950980415950501,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"triviaqa": {
						"alias": "triviaqa",
						"exact_match,remove_whitespace": 0.010365581810075792,
						"exact_match_stderr,remove_whitespace": 0.0007561130065413419
					},
					"truthfulqa": {
						"acc,none": 0.29703702849199803,
						"acc_stderr,none": 0.0012828771176254465,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589664,
						"bleu_diff,none": -7.840653357813317,
						"bleu_diff_stderr,none": 0.7754919380348179,
						"bleu_max,none": 24.969060312909836,
						"bleu_max_stderr,none": 0.7557545682762691,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.01586634640138431,
						"rouge1_diff,none": -10.120249669504368,
						"rouge1_diff_stderr,none": 0.8379142333128283,
						"rouge1_max,none": 50.90552409890141,
						"rouge1_max_stderr,none": 0.8232371627604237,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.129225789767089,
						"rouge2_diff_stderr,none": 1.02691682629521,
						"rouge2_max,none": 34.1959988780803,
						"rouge2_max_stderr,none": 0.9629739735311235,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.01574402724825605,
						"rougeL_diff,none": -10.376350888599612,
						"rougeL_diff_stderr,none": 0.842205766667267,
						"rougeL_max,none": 47.81377456968735,
						"rougeL_max_stderr,none": 0.833575150044744
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.29498164014687883,
						"bleu_acc_stderr,none": 0.015964400965589664,
						"bleu_diff,none": -7.840653357813317,
						"bleu_diff_stderr,none": 0.7754919380348179,
						"bleu_max,none": 24.969060312909836,
						"bleu_max_stderr,none": 0.7557545682762691,
						"rouge1_acc,none": 0.28886168910648713,
						"rouge1_acc_stderr,none": 0.01586634640138431,
						"rouge1_diff,none": -10.120249669504368,
						"rouge1_diff_stderr,none": 0.8379142333128283,
						"rouge1_max,none": 50.90552409890141,
						"rouge1_max_stderr,none": 0.8232371627604237,
						"rouge2_acc,none": 0.24724602203182375,
						"rouge2_acc_stderr,none": 0.015102404797359652,
						"rouge2_diff,none": -12.129225789767089,
						"rouge2_diff_stderr,none": 1.02691682629521,
						"rouge2_max,none": 34.1959988780803,
						"rouge2_max_stderr,none": 0.9629739735311235,
						"rougeL_acc,none": 0.28151774785801714,
						"rougeL_acc_stderr,none": 0.01574402724825605,
						"rougeL_diff,none": -10.376350888599612,
						"rougeL_diff_stderr,none": 0.842205766667267,
						"rougeL_max,none": 47.81377456968735,
						"rougeL_max_stderr,none": 0.833575150044744
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23133414932680538,
						"acc_stderr,none": 0.014761945174862673,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36270632007541614,
						"acc_stderr,none": 0.013724057323521385,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519664
					},
					"wic": {
						"acc,none": 0.5266457680250783,
						"acc_stderr,none": 0.019782570188812163,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6806840467125757,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6028995819336924,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.465979503288589,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6195737963693765,
						"acc_stderr,none": 0.013644727908656834,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.05975550263548289,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8205128205128205,
						"acc_stderr,none": 0.023268851614693054,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.05725225820316545,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407063,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.020776701920308997,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.02117566569520941,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.02187429930168925,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.021294951277234637,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.021113492347743727,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4234805890227577,
						"acc_stderr,none": 0.047083828616057824,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337347,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.43132530120481927,
						"acc_stderr,none": 0.009927090290379253,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.47630522088353416,
						"acc_stderr,none": 0.010010812905412062,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.009841918156163181,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5204819277108433,
						"acc_stderr,none": 0.01001366062993081,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4831325301204819,
						"acc_stderr,none": 0.010016368453021547,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4783132530120482,
						"acc_stderr,none": 0.010012641367065516,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39036144578313253,
						"acc_stderr,none": 0.009778161879954578,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4807228915662651,
						"acc_stderr,none": 0.010014621554188648,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998448,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453007,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583412,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335279,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702083,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3610441767068273,
						"acc_stderr,none": 0.009627269742195705,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5983996149449492,
						"acc_stderr,none": 0.04990323405396006,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716333,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7445400397088021,
						"acc_stderr,none": 0.0112232070642676,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6585043017868961,
						"acc_stderr,none": 0.012203473241214447,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481952,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5645268034414295,
						"acc_stderr,none": 0.012759525506489237,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6307081403044341,
						"acc_stderr,none": 0.01241968588127358,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163346,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6432825943084051,
						"acc_stderr,none": 0.012327487677110354,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5268034414295168,
						"acc_stderr,none": 0.012848623899505772,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.585704831237591,
						"acc_stderr,none": 0.012676689821720669,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6234281932495036,
						"acc_stderr,none": 0.01246891448965935,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7691616093504158,
						"acc_stderr,none": 0.04425483860662812,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8395698924731183,
						"acc_stderr,none": 0.007612955714996515,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6684045881126173,
						"acc_stderr,none": 0.01521042023821811,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7186311787072244,
						"acc_stderr,none": 0.027780519816709794,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.02678685165920092,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.746031746031746,
						"acc_stderr,none": 0.019408160646774163,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"trust_remote_code=True": {
				"confObj": {
					"trust_remote_code": "True"
				},
				"confStr": "trust_remote_code=True",
				"groups": {
					"lambada_multilingual": {
						"acc,none": 0.49085969338249563,
						"acc_stderr,none": 0.07961188912783704,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.756897900017233,
						"perplexity_stderr,none": 11.775730404632489
					},
					"pawsx": {
						"acc,none": 0.5146428571428572,
						"acc_stderr,none": 0.022954035034775326,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5901818181818181,
						"acc_stderr,none": 0.06015157170264156,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.42331994645247656,
						"acc_stderr,none": 0.04693183523309166,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5986402743517237,
						"acc_stderr,none": 0.05795056306002414,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7666891436277815,
						"acc_stderr,none": 0.04083376113561301,
						"alias": "xwinograd"
					}
				},
				"results": {
					"lambada_multilingual": {
						"acc,none": 0.49085969338249563,
						"acc_stderr,none": 0.07961188912783704,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.756897900017233,
						"perplexity_stderr,none": 11.775730404632489
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3964680768484378,
						"acc_stderr,none": 0.006815007030417616,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 47.80612542606288,
						"perplexity_stderr,none": 2.7897856450442577
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6887250145546284,
						"acc_stderr,none": 0.006450703968778299,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.1540919408887405,
						"perplexity_stderr,none": 0.08968222595411152
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.41024645837376283,
						"acc_stderr,none": 0.006852827058720168,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 43.23524059898475,
						"perplexity_stderr,none": 2.255182711765601
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.49582767320007765,
						"acc_stderr,none": 0.006965735121159857,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 24.651601219742307,
						"perplexity_stderr,none": 1.2767272688931803
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4630312439355715,
						"acc_stderr,none": 0.0069469109141427725,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 33.93743031440748,
						"perplexity_stderr,none": 1.892848278831158
					},
					"paws_de": {
						"acc,none": 0.4925,
						"acc_stderr,none": 0.011181877847486001,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5005,
						"acc_stderr,none": 0.011183130429495192,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4755,
						"acc_stderr,none": 0.011169702598013182,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5555,
						"acc_stderr,none": 0.011114028784284502,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.011175886999478619,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.529,
						"acc_stderr,none": 0.011164310140373718,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.01116092102288328,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5146428571428572,
						"acc_stderr,none": 0.022954035034775326,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5901818181818181,
						"acc_stderr,none": 0.06015157170264156,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724805,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.02117566569520941,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.02237429816635318,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143022,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360444,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.02189352994166582,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.021380042385946044,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.021049612166134823,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.42331994645247656,
						"acc_stderr,none": 0.04693183523309166,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757701,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42931726907630524,
						"acc_stderr,none": 0.009921425969589916,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.47309236947791167,
						"acc_stderr,none": 0.010007549970702514,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40200803212851405,
						"acc_stderr,none": 0.009827715873484728,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5164658634538153,
						"acc_stderr,none": 0.010016636930829975,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4855421686746988,
						"acc_stderr,none": 0.010017882185606015,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.010015524156629813,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39598393574297186,
						"acc_stderr,none": 0.00980280988850235,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.47670682730923697,
						"acc_stderr,none": 0.010011191570021297,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.37269076305220883,
						"acc_stderr,none": 0.009691761259693463,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750344,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.42289156626506025,
						"acc_stderr,none": 0.009902179034797426,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3534136546184739,
						"acc_stderr,none": 0.009581698005070962,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42971887550200805,
						"acc_stderr,none": 0.009922572153607775,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3614457831325301,
						"acc_stderr,none": 0.009629594988040065,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5986402743517237,
						"acc_stderr,none": 0.05795056306002414,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.012772408697979139,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7452018530774321,
						"acc_stderr,none": 0.011213640323414547,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6565188616810059,
						"acc_stderr,none": 0.012220432513619242,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245273,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.01275504628991222,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6313699536730641,
						"acc_stderr,none": 0.012415060691280351,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5023163467902052,
						"acc_stderr,none": 0.01286698723947804,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6446062210456651,
						"acc_stderr,none": 0.012317247930418378,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.528788881535407,
						"acc_stderr,none": 0.012845779070719498,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5843812045003309,
						"acc_stderr,none": 0.012682569054907635,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6234281932495036,
						"acc_stderr,none": 0.01246891448965935,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7666891436277815,
						"acc_stderr,none": 0.04083376113561301,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8365591397849462,
						"acc_stderr,none": 0.007670268769041714,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6642335766423357,
						"acc_stderr,none": 0.015257953615804233,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7186311787072244,
						"acc_stderr,none": 0.027780519816709794,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.026665559335926008,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7420634920634921,
						"acc_stderr,none": 0.01950711068855576,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/rwkv-5-world-3b"
	},
	"RWKV/rwkv-6-world-1b6": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.508173618940248,
						"acc_norm,none": 0.4898534385569335,
						"acc_norm_stderr,none": 0.0728015773083786,
						"acc_stderr,none": 0.09482535477141689,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3334375,
						"acc_stderr,none": 0.017545436428660976,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8339402985074628,
						"acc_stderr,none": 0.14173056930838854,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2541875323778276,
						"acc_norm,none": 0.2541875323778276,
						"acc_norm_stderr,none": 0.040713899089691315,
						"acc_stderr,none": 0.040713899089691315,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5046599571224393,
						"acc_stderr,none": 0.006778784568356254,
						"alias": "glue",
						"f1,none": 0.6044389420002092,
						"f1_stderr,none": 0.0002741097265034049,
						"mcc,none": -0.005804290627022071,
						"mcc_stderr,none": 0.030567895109289367
					},
					"lambada": {
						"acc,none": 0.6403066175043663,
						"acc_stderr,none": 0.01765872475573811,
						"alias": "lambada",
						"perplexity,none": 5.378547574829985,
						"perplexity_stderr,none": 0.40577408005536164
					},
					"lambada_multilingual": {
						"acc,none": 0.4716087715893654,
						"acc_stderr,none": 0.08168128309101076,
						"alias": "lambada_multilingual",
						"perplexity,none": 37.25038421720749,
						"perplexity_stderr,none": 14.263817715060688
					},
					"mmlu": {
						"acc,none": 0.240777666999003,
						"acc_stderr,none": 0.03716787784231046,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24399574920297556,
						"acc_stderr,none": 0.03014408374212284,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2594142259414226,
						"acc_stderr,none": 0.0406265625788296,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2170945726356841,
						"acc_stderr,none": 0.031501996172087654,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2407231208372978,
						"acc_stderr,none": 0.04245591516359946,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.5056428571428572,
						"acc_stderr,none": 0.022610687930338495,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7158043079669848,
						"acc_norm,none": 0.49752728511242167,
						"acc_norm_stderr,none": 0.00785691502111576,
						"acc_stderr,none": 0.1416605570335328,
						"alias": "pythia",
						"bits_per_byte,none": 0.7065458125461656,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6318922611374063,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.610260723841425,
						"perplexity_stderr,none": 0.10411255078088041,
						"word_perplexity,none": 13.720097767622354,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.2942187671371358,
						"acc_stderr,none": 0.001519944928232785,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2839657282741738,
						"bleu_acc_stderr,none": 0.01578537085839672,
						"bleu_diff,none": -8.110885336013931,
						"bleu_diff_stderr,none": 0.7874567661968082,
						"bleu_max,none": 25.752566233837026,
						"bleu_max_stderr,none": 0.7680320060710114,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -10.10522297748055,
						"rouge1_diff_stderr,none": 0.8376766200728099,
						"rouge1_max,none": 50.94818390873027,
						"rouge1_max_stderr,none": 0.8414535948778052,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.014869755015871105,
						"rouge2_diff,none": -12.19825913422267,
						"rouge2_diff_stderr,none": 1.0145720576632444,
						"rouge2_max,none": 34.949240764650895,
						"rouge2_max_stderr,none": 0.9741165174759555,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -10.510243630141371,
						"rougeL_diff_stderr,none": 0.8516887782098291,
						"rougeL_max,none": 48.09209865497623,
						"rougeL_max_stderr,none": 0.8527860080666048
					},
					"xcopa": {
						"acc,none": 0.5803636363636364,
						"acc_stderr,none": 0.04983041741900531,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.41451137884872824,
						"acc_stderr,none": 0.04901264154004618,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.581673786174117,
						"acc_stderr,none": 0.05652695316734595,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7608451337379186,
						"acc_stderr,none": 0.04463671770667426,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.508173618940248,
						"acc_norm,none": 0.4898534385569335,
						"acc_norm_stderr,none": 0.0728015773083786,
						"acc_stderr,none": 0.09482535477141689,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3334375,
						"acc_stderr,none": 0.017545436428660976,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.014671272822977886,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932573,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3491666666666667,
						"acc_stderr,none": 0.013767075395077244,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.30802047781569963,
						"acc_norm,none": 0.3370307167235495,
						"acc_norm_stderr,none": 0.013813476652902272,
						"acc_stderr,none": 0.013491429517292038,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6069023569023569,
						"acc_norm,none": 0.5652356902356902,
						"acc_norm_stderr,none": 0.010172083670402777,
						"acc_stderr,none": 0.010022540618945312,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8339402985074628,
						"acc_stderr,none": 0.14173056930838854,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904636,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578115,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437586,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370143,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515445,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.015110404505648661,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.01321172015861475,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475287,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426557,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611469,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571408,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897885,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727057,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.729,
						"acc_stderr,none": 0.014062601350986184,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.01233625482807411,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651532,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559549,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998465,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.369,
						"acc_stderr,none": 0.015266698139154615,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592086,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.0123107902084128,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535255,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235237,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783205,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295445,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.00923305200078773,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397347,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.01570498795436179,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.015349091002225347,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.663,
						"acc_stderr,none": 0.014955087918653598,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592081,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139981,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.013736254390651154,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695474,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243767,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235237,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881414,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.499,
						"acc_stderr,none": 0.01581926829057682,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165546,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621257,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.002818500300504506,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937584,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.01565899754787025,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343954,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244083,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.707,
						"acc_stderr,none": 0.014399942998441271,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151129,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274553,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304422,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295433,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118588,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.00420638724961145,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.015768596914394375,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.374,
						"acc_stderr,none": 0.015308767369006372,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2541875323778276,
						"acc_norm,none": 0.2541875323778276,
						"acc_norm_stderr,none": 0.040713899089691315,
						"acc_stderr,none": 0.040713899089691315,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911013,
						"acc_stderr,none": 0.03500638924911013,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22972972972972974,
						"acc_norm,none": 0.22972972972972974,
						"acc_norm_stderr,none": 0.034695368254076084,
						"acc_stderr,none": 0.034695368254076084,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2865853658536585,
						"acc_norm,none": 0.2865853658536585,
						"acc_norm_stderr,none": 0.03541638332993505,
						"acc_stderr,none": 0.03541638332993505,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.22424242424242424,
						"acc_norm,none": 0.22424242424242424,
						"acc_norm_stderr,none": 0.032568666616811015,
						"acc_stderr,none": 0.032568666616811015,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.03016631629884799,
						"acc_stderr,none": 0.03016631629884799,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22900763358778625,
						"acc_norm,none": 0.22900763358778625,
						"acc_norm_stderr,none": 0.036853466317118506,
						"acc_stderr,none": 0.036853466317118506,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.037626074966240076,
						"acc_stderr,none": 0.037626074966240076,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.21495327102803738,
						"acc_norm,none": 0.21495327102803738,
						"acc_norm_stderr,none": 0.03989944463395407,
						"acc_stderr,none": 0.03989944463395407,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24148606811145512,
						"acc_norm,none": 0.24148606811145512,
						"acc_norm_stderr,none": 0.02385063165820596,
						"acc_stderr,none": 0.02385063165820596,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2569832402234637,
						"acc_norm,none": 0.2569832402234637,
						"acc_norm_stderr,none": 0.032752292523561655,
						"acc_stderr,none": 0.032752292523561655,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036416,
						"acc_stderr,none": 0.027985699387036416,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3113207547169811,
						"acc_norm,none": 0.3113207547169811,
						"acc_norm_stderr,none": 0.0451874553177075,
						"acc_stderr,none": 0.0451874553177075,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.042692919157281094,
						"acc_stderr,none": 0.042692919157281094,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.046216787599682646,
						"acc_stderr,none": 0.046216787599682646,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614584,
						"acc_stderr,none": 0.04429811949614584,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371222,
						"acc_stderr,none": 0.04198857662371222,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.0263507226555644,
						"acc_stderr,none": 0.0263507226555644,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.029771775228145638,
						"acc_stderr,none": 0.029771775228145638,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.0340105262010409,
						"acc_stderr,none": 0.0340105262010409,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.036538475108960564,
						"acc_stderr,none": 0.036538475108960564,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.037622409350890895,
						"acc_stderr,none": 0.037622409350890895,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.034520558111649044,
						"acc_stderr,none": 0.034520558111649044,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.034089978868575295,
						"acc_stderr,none": 0.034089978868575295,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.3253968253968254,
						"acc_norm,none": 0.3253968253968254,
						"acc_norm_stderr,none": 0.029572904809613526,
						"acc_stderr,none": 0.029572904809613526,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03053289223393202,
						"acc_stderr,none": 0.03053289223393202,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3067226890756303,
						"acc_norm,none": 0.3067226890756303,
						"acc_norm_stderr,none": 0.029953823891887058,
						"acc_stderr,none": 0.029953823891887058,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933935,
						"acc_stderr,none": 0.028187385293933935,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.036333844140734636,
						"acc_stderr,none": 0.036333844140734636,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079127,
						"acc_stderr,none": 0.03344352850079127,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594346,
						"acc_stderr,none": 0.03518627932594346,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676974,
						"acc_stderr,none": 0.03410167836676974,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.26515151515151514,
						"acc_norm,none": 0.26515151515151514,
						"acc_norm_stderr,none": 0.038566507358125605,
						"acc_stderr,none": 0.038566507358125605,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053241,
						"acc_stderr,none": 0.03535681229053241,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.03718489006818115,
						"acc_stderr,none": 0.03718489006818115,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.23243243243243245,
						"acc_norm,none": 0.23243243243243245,
						"acc_norm_stderr,none": 0.031138505170794667,
						"acc_stderr,none": 0.031138505170794667,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.033611014038904936,
						"acc_stderr,none": 0.033611014038904936,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2360097323600973,
						"acc_norm,none": 0.2360097323600973,
						"acc_norm_stderr,none": 0.020970893800892804,
						"acc_stderr,none": 0.020970893800892804,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.21962616822429906,
						"acc_norm,none": 0.21962616822429906,
						"acc_norm_stderr,none": 0.028366358642017562,
						"acc_stderr,none": 0.028366358642017562,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671826,
						"acc_stderr,none": 0.041118866352671826,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03041268445992877,
						"acc_stderr,none": 0.03041268445992877,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2833333333333333,
						"acc_norm,none": 0.2833333333333333,
						"acc_norm_stderr,none": 0.033680685541162235,
						"acc_stderr,none": 0.033680685541162235,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24338624338624337,
						"acc_norm,none": 0.24338624338624337,
						"acc_norm_stderr,none": 0.03129725192855849,
						"acc_stderr,none": 0.03129725192855849,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649575,
						"acc_stderr,none": 0.04083221538649575,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.03922322702763677,
						"acc_stderr,none": 0.03922322702763677,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.03313334329221721,
						"acc_stderr,none": 0.03313334329221721,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945204,
						"acc_stderr,none": 0.029927771242945204,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281308,
						"acc_stderr,none": 0.022199827758281308,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2629310344827586,
						"acc_norm,none": 0.2629310344827586,
						"acc_norm_stderr,none": 0.02896469754454016,
						"acc_stderr,none": 0.02896469754454016,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.23563218390804597,
						"acc_norm,none": 0.23563218390804597,
						"acc_norm_stderr,none": 0.03226602373932447,
						"acc_stderr,none": 0.03226602373932447,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.03591444084196969,
						"acc_stderr,none": 0.03591444084196969,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24336283185840707,
						"acc_norm,none": 0.24336283185840707,
						"acc_norm_stderr,none": 0.02860748650485771,
						"acc_stderr,none": 0.02860748650485771,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03250593287417369,
						"acc_stderr,none": 0.03250593287417369,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.029965323374109197,
						"acc_stderr,none": 0.029965323374109197,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.005804290627022071,
						"mcc_stderr,none": 0.030567895109289367
					},
					"copa": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.04163331998932261,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5046599571224393,
						"acc_stderr,none": 0.006778784568356254,
						"alias": "glue",
						"f1,none": 0.6044389420002092,
						"f1_stderr,none": 0.0002741097265034049,
						"mcc,none": -0.005804290627022071,
						"mcc_stderr,none": 0.030567895109289367
					},
					"hellaswag": {
						"acc,none": 0.46325433180641307,
						"acc_norm,none": 0.6143198566022705,
						"acc_norm_stderr,none": 0.004857607641160633,
						"acc_stderr,none": 0.004976288321681822,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6403066175043663,
						"acc_stderr,none": 0.01765872475573811,
						"alias": "lambada",
						"perplexity,none": 5.378547574829985,
						"perplexity_stderr,none": 0.40577408005536164
					},
					"lambada_multilingual": {
						"acc,none": 0.4716087715893654,
						"acc_stderr,none": 0.08168128309101076,
						"alias": "lambada_multilingual",
						"perplexity,none": 37.25038421720749,
						"perplexity_stderr,none": 14.263817715060688
					},
					"lambada_openai": {
						"acc,none": 0.672811954201436,
						"acc_stderr,none": 0.006536686193974618,
						"alias": " - lambada_openai",
						"perplexity,none": 4.610260723841425,
						"perplexity_stderr,none": 0.10411255078088041
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3683291286629148,
						"acc_stderr,none": 0.006720096830071419,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 57.68264824870673,
						"perplexity_stderr,none": 3.3845468518910007
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.673394139336309,
						"acc_stderr,none": 0.0065336930212616965,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.609914134913992,
						"perplexity_stderr,none": 0.10409462148847845
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3960799534251892,
						"acc_stderr,none": 0.006813860325177773,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 51.370873595176214,
						"perplexity_stderr,none": 2.7367802165572153
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.477779934019018,
						"acc_stderr,none": 0.006959095614775139,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 29.974941724670625,
						"perplexity_stderr,none": 1.5808708788196977
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4424607025033961,
						"acc_stderr,none": 0.006919698416337258,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 42.61354338256991,
						"perplexity_stderr,none": 2.4433516023227355
					},
					"lambada_standard": {
						"acc,none": 0.6076072190956724,
						"acc_stderr,none": 0.0068027426191620294,
						"alias": " - lambada_standard",
						"perplexity,none": 6.146652595794247,
						"perplexity_stderr,none": 0.15313032791396983
					},
					"logiqa": {
						"acc,none": 0.22734254992319508,
						"acc_norm,none": 0.30414746543778803,
						"acc_norm_stderr,none": 0.01804446579150677,
						"acc_stderr,none": 0.01643906767511774,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.240777666999003,
						"acc_stderr,none": 0.03716787784231046,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816507,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.1925925925925926,
						"acc_stderr,none": 0.0340654205850265,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.18421052631578946,
						"acc_stderr,none": 0.0315469804508223,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952365,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.23773584905660378,
						"acc_stderr,none": 0.02619980880756191,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03621034121889507,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2658959537572254,
						"acc_stderr,none": 0.033687629322594316,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364396,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2723404255319149,
						"acc_stderr,none": 0.0291012906983867,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748142,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.02141168439369419,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604674,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24516129032258063,
						"acc_stderr,none": 0.024472243840895514,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.23645320197044334,
						"acc_stderr,none": 0.029896114291733545,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3151515151515151,
						"acc_stderr,none": 0.0362773057502241,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.18181818181818182,
						"acc_stderr,none": 0.027479603010538804,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.25906735751295334,
						"acc_stderr,none": 0.031618779179354094,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.19230769230769232,
						"acc_stderr,none": 0.01998234720863729,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.026067159222275805,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.22268907563025211,
						"acc_stderr,none": 0.02702543349888238,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.21100917431192662,
						"acc_stderr,none": 0.017493922404112648,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.028353212866863445,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.028867431449849303,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.28270042194092826,
						"acc_stderr,none": 0.029312814153955917,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.31390134529147984,
						"acc_stderr,none": 0.031146796482972465,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24399574920297556,
						"acc_stderr,none": 0.03014408374212284,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.19834710743801653,
						"acc_stderr,none": 0.036401182719909456,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.04236511258094634,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26993865030674846,
						"acc_stderr,none": 0.03487825168497892,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285713,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.26495726495726496,
						"acc_stderr,none": 0.028911208802749465,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2567049808429119,
						"acc_stderr,none": 0.015620480263064524,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.22832369942196531,
						"acc_stderr,none": 0.02259870380432161,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24581005586592178,
						"acc_stderr,none": 0.014400296429225606,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.025261691219729487,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2594142259414226,
						"acc_stderr,none": 0.0406265625788296,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.21864951768488747,
						"acc_stderr,none": 0.02347558141786111,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2345679012345679,
						"acc_stderr,none": 0.02357688174400572,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.18085106382978725,
						"acc_stderr,none": 0.022960894850119137,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2405475880052151,
						"acc_stderr,none": 0.01091640673547895,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22794117647058823,
						"acc_stderr,none": 0.025483081468029804,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2369281045751634,
						"acc_stderr,none": 0.017201662169789772,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.24545454545454545,
						"acc_stderr,none": 0.041220665028782855,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19183673469387755,
						"acc_stderr,none": 0.025206963154225378,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2170945726356841,
						"acc_stderr,none": 0.031501996172087654,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.208955223880597,
						"acc_stderr,none": 0.028748298931728655,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2407231208372978,
						"acc_stderr,none": 0.04245591516359946,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3072289156626506,
						"acc_stderr,none": 0.03591566797824663,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03377310252209193,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.356698930208864,
						"acc_stderr,none": 0.0048354290289595305,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3569975589910496,
						"acc_stderr,none": 0.004832146426498175,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6593137254901961,
						"acc_stderr,none": 0.023492334306757023,
						"alias": " - mrpc",
						"f1,none": 0.7804107424960506,
						"f1_stderr,none": 0.018209981254073963
					},
					"openbookqa": {
						"acc,none": 0.244,
						"acc_norm,none": 0.376,
						"acc_norm_stderr,none": 0.02168382753928612,
						"acc_stderr,none": 0.01922673489361458,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.011177408788874894,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4795,
						"acc_stderr,none": 0.011173732641806815,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.011180899170152973,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5445,
						"acc_stderr,none": 0.011138757154883975,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.011181704488030002,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.011124809242874423,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4985,
						"acc_stderr,none": 0.0111830856968392,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5056428571428572,
						"acc_stderr,none": 0.022610687930338495,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7404787812840044,
						"acc_norm,none": 0.7372143634385201,
						"acc_norm_stderr,none": 0.010269354068140772,
						"acc_stderr,none": 0.010227939888173927,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7158043079669848,
						"acc_norm,none": 0.49752728511242167,
						"acc_norm_stderr,none": 0.00785691502111576,
						"acc_stderr,none": 0.1416605570335328,
						"alias": "pythia",
						"bits_per_byte,none": 0.7065458125461656,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6318922611374063,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.610260723841425,
						"perplexity_stderr,none": 0.10411255078088041,
						"word_perplexity,none": 13.720097767622354,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4982610287387882,
						"acc_stderr,none": 0.0067653696341649396,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.574795943606233,
						"acc_stderr,none": 0.0024587198483537968,
						"alias": " - qqp",
						"f1,none": 0.6029150209040722,
						"f1_stderr,none": 0.002783607693608507
					},
					"record": {
						"alias": "record",
						"em,none": 0.2845,
						"em_stderr,none": 0.004511985239456891,
						"f1,none": 0.2934285716474056,
						"f1_stderr,none": 0.004517814402620982
					},
					"rte": {
						"acc,none": 0.6101083032490975,
						"acc_stderr,none": 0.029357625083848062,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.884,
						"acc_norm,none": 0.843,
						"acc_norm_stderr,none": 0.011510146979230185,
						"acc_stderr,none": 0.010131468138756993,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.5172018348623854,
						"acc_stderr,none": 0.01693182442590374,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.2942187671371358,
						"acc_stderr,none": 0.001519944928232785,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2839657282741738,
						"bleu_acc_stderr,none": 0.01578537085839672,
						"bleu_diff,none": -8.110885336013931,
						"bleu_diff_stderr,none": 0.7874567661968082,
						"bleu_max,none": 25.752566233837026,
						"bleu_max_stderr,none": 0.7680320060710114,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -10.10522297748055,
						"rouge1_diff_stderr,none": 0.8376766200728099,
						"rouge1_max,none": 50.94818390873027,
						"rouge1_max_stderr,none": 0.8414535948778052,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.014869755015871105,
						"rouge2_diff,none": -12.19825913422267,
						"rouge2_diff_stderr,none": 1.0145720576632444,
						"rouge2_max,none": 34.949240764650895,
						"rouge2_max_stderr,none": 0.9741165174759555,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -10.510243630141371,
						"rougeL_diff_stderr,none": 0.8516887782098291,
						"rougeL_max,none": 48.09209865497623,
						"rougeL_max_stderr,none": 0.8527860080666048
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2839657282741738,
						"bleu_acc_stderr,none": 0.01578537085839672,
						"bleu_diff,none": -8.110885336013931,
						"bleu_diff_stderr,none": 0.7874567661968082,
						"bleu_max,none": 25.752566233837026,
						"bleu_max_stderr,none": 0.7680320060710114,
						"rouge1_acc,none": 0.29253365973072215,
						"rouge1_acc_stderr,none": 0.015925597445286165,
						"rouge1_diff,none": -10.10522297748055,
						"rouge1_diff_stderr,none": 0.8376766200728099,
						"rouge1_max,none": 50.94818390873027,
						"rouge1_max_stderr,none": 0.8414535948778052,
						"rouge2_acc,none": 0.23623011015911874,
						"rouge2_acc_stderr,none": 0.014869755015871105,
						"rouge2_diff,none": -12.19825913422267,
						"rouge2_diff_stderr,none": 1.0145720576632444,
						"rouge2_max,none": 34.949240764650895,
						"rouge2_max_stderr,none": 0.9741165174759555,
						"rougeL_acc,none": 0.27906976744186046,
						"rougeL_acc_stderr,none": 0.015702107090627904,
						"rougeL_diff,none": -10.510243630141371,
						"rougeL_diff_stderr,none": 0.8516887782098291,
						"rougeL_max,none": 48.09209865497623,
						"rougeL_max_stderr,none": 0.8527860080666048
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2215422276621787,
						"acc_stderr,none": 0.01453786760130114,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36689530661209285,
						"acc_stderr,none": 0.013646494016726384,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.7065458125461656,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6318922611374063,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 13.720097767622354,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6006314127861089,
						"acc_stderr,none": 0.013764933546717614,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.059755502635482904,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.5803636363636364,
						"acc_stderr,none": 0.04983041741900531,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.022175109265613165,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.021294951277234637,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.021434712356072656,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.022000910893877186,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.021461434862859126,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.021588982568353544,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.41451137884872824,
						"acc_stderr,none": 0.04901264154004618,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337345,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314558,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4674698795180723,
						"acc_stderr,none": 0.010000839483876025,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3710843373493976,
						"acc_stderr,none": 0.009683226021349287,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.52570281124498,
						"acc_stderr,none": 0.010008822253312066,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4791164658634538,
						"acc_stderr,none": 0.010013327358568523,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4791164658634538,
						"acc_stderr,none": 0.010013327358568523,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.00978355810999709,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43253012048192774,
						"acc_stderr,none": 0.009930409027139458,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.009620250217765991,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702076,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3963855421686747,
						"acc_stderr,none": 0.009804518520476648,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206102,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43775100401606426,
						"acc_stderr,none": 0.00994409973429016,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981031,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.581673786174117,
						"acc_stderr,none": 0.05652695316734595,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5519523494374586,
						"acc_stderr,none": 0.012797478885304739,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7346128391793514,
						"acc_stderr,none": 0.011362678996097103,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6366644606221046,
						"acc_stderr,none": 0.012377153306613275,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.528788881535407,
						"acc_stderr,none": 0.012845779070719498,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5506287227001986,
						"acc_stderr,none": 0.012800991591293371,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252875,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49503639973527463,
						"acc_stderr,none": 0.012866491277589926,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6181336863004633,
						"acc_stderr,none": 0.012502832319146888,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5168762409000662,
						"acc_stderr,none": 0.0128597939199776,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5612177365982793,
						"acc_stderr,none": 0.012770319186938005,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247465,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7608451337379186,
						"acc_stderr,none": 0.04463671770667426,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.836989247311828,
						"acc_stderr,none": 0.007662138603849466,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6475495307612096,
						"acc_stderr,none": 0.015434863337068146,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6768060836501901,
						"acc_stderr,none": 0.02889435936291789,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.653968253968254,
						"acc_stderr,none": 0.026845499021972877,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7440476190476191,
						"acc_stderr,none": 0.01945789968402802,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.2604329867540237,
						"acc_stderr,none": 0.044784656811631046,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25207226354941553,
						"acc_stderr,none": 0.030676103329182393,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2684261345349212,
						"acc_stderr,none": 0.05226815008470601,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2619434514137146,
						"acc_stderr,none": 0.045396528210999806,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26355851569933403,
						"acc_stderr,none": 0.05236897885736381,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.33361774744027306,
						"acc_norm,none": 0.3720136518771331,
						"acc_norm_stderr,none": 0.014124597881844461,
						"acc_stderr,none": 0.013778687054176545,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.01819560272934041,
						"exact_match_stderr,get-answer": 0.0036816118940738735
					},
					"hellaswag": {
						"acc,none": 0.45817566221868156,
						"acc_norm,none": 0.6082453694483171,
						"acc_norm_stderr,none": 0.0048714471065549264,
						"acc_stderr,none": 0.004972293764978729,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.2604329867540237,
						"acc_stderr,none": 0.044784656811631046,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.036333844140734636,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.20394736842105263,
						"acc_stderr,none": 0.032790004063100495,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2037735849056604,
						"acc_stderr,none": 0.024790784501775402,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.24305555555555555,
						"acc_stderr,none": 0.03586879280080341,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252606,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.0326926380614177,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.04336432707993177,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610337,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135302,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_stderr,none": 0.02193587808118476,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604674,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.29354838709677417,
						"acc_stderr,none": 0.025906087021319295,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.19704433497536947,
						"acc_stderr,none": 0.027986724666736223,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.24848484848484848,
						"acc_stderr,none": 0.03374402644139406,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.22727272727272727,
						"acc_stderr,none": 0.02985751567338642,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.29015544041450775,
						"acc_stderr,none": 0.032752644677915166,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.023901157979402538,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.0257878742209593,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2689075630252101,
						"acc_stderr,none": 0.02880139219363128,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.034791855725996586,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.22201834862385322,
						"acc_stderr,none": 0.01781884956479663,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.03167468706828979,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.031493281045079556,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2742616033755274,
						"acc_stderr,none": 0.02904133351059804,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3004484304932735,
						"acc_stderr,none": 0.030769352008229136,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.29770992366412213,
						"acc_stderr,none": 0.04010358942462203,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25207226354941553,
						"acc_stderr,none": 0.030676103329182393,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2396694214876033,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.03893542518824847,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.035590395316173425,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04109974682633932,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.29914529914529914,
						"acc_stderr,none": 0.02999695185834948,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.015302380123542087,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24566473988439305,
						"acc_stderr,none": 0.023176298203992,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2245810055865922,
						"acc_stderr,none": 0.013956803666544637,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.02463004897982476,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2684261345349212,
						"acc_stderr,none": 0.05226815008470601,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2733118971061093,
						"acc_stderr,none": 0.02531176597542612,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.024659685185967287,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2375886524822695,
						"acc_stderr,none": 0.025389512552729903,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2470664928292047,
						"acc_stderr,none": 0.01101575225527933,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.030134614954403924,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2565359477124183,
						"acc_stderr,none": 0.01766784161237899,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.15454545454545454,
						"acc_stderr,none": 0.03462262571262667,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2653061224489796,
						"acc_stderr,none": 0.02826388994378458,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2619434514137146,
						"acc_stderr,none": 0.045396528210999806,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2537313432835821,
						"acc_stderr,none": 0.03076944496729602,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26355851569933403,
						"acc_stderr,none": 0.05236897885736381,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3072289156626506,
						"acc_stderr,none": 0.03591566797824664,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.035993357714560276,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36690742554589056,
						"acc_stderr,none": 0.01364753716205304,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.6322020520915549,
						"acc_stderr,none": 0.013552385559833596,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "RWKV/rwkv-6-world-1b6"
	},
	"RWKV/rwkv-6-world-3b": {
		"config": {
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.2646346674262926,
						"acc_stderr,none": 0.044081442274649114,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25759829968119025,
						"acc_stderr,none": 0.032576752186157666,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2796910202767943,
						"acc_stderr,none": 0.04086818955473523,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.260318492037699,
						"acc_stderr,none": 0.04302811106279924,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26450999048525214,
						"acc_stderr,none": 0.05881482698027433,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.3779863481228669,
						"acc_norm,none": 0.40784982935153585,
						"acc_norm_stderr,none": 0.014361097288449696,
						"acc_stderr,none": 0.014169664520303101,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.01592115238817286,
						"exact_match_stderr,get-answer": 0.0034478192723890006
					},
					"hellaswag": {
						"acc,none": 0.4814777932682733,
						"acc_norm,none": 0.6461860187213703,
						"acc_norm_stderr,none": 0.004771751187407033,
						"acc_stderr,none": 0.004986356526063966,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.2646346674262926,
						"acc_stderr,none": 0.044081442274649114,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.0335567721631314,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.125,
						"acc_stderr,none": 0.026913523521537846,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2679245283018868,
						"acc_stderr,none": 0.02725726032249485,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.24305555555555555,
						"acc_stderr,none": 0.03586879280080342,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2023121387283237,
						"acc_stderr,none": 0.03063114553919882,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.04336432707993176,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3021276595744681,
						"acc_stderr,none": 0.030017554471880557,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.04142439719489362,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2482758620689655,
						"acc_stderr,none": 0.036001056927277716,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2751322751322751,
						"acc_stderr,none": 0.023000086859068656,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.18253968253968253,
						"acc_stderr,none": 0.03455071019102147,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2709677419354839,
						"acc_stderr,none": 0.025284416114900156,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3054187192118227,
						"acc_stderr,none": 0.032406615658684086,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3393939393939394,
						"acc_stderr,none": 0.03697442205031595,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.02962022787479048,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3316062176165803,
						"acc_stderr,none": 0.03397636541089117,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2512820512820513,
						"acc_stderr,none": 0.021992016662370554,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.0263357394040558,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3445378151260504,
						"acc_stderr,none": 0.030868682604121633,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24036697247706423,
						"acc_stderr,none": 0.01832060732096407,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.4027777777777778,
						"acc_stderr,none": 0.03344887382997866,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.03077855467869326,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.25316455696202533,
						"acc_stderr,none": 0.0283046579430353,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.336322869955157,
						"acc_stderr,none": 0.031708824268455,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2595419847328244,
						"acc_stderr,none": 0.03844876139785271,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25759829968119025,
						"acc_stderr,none": 0.032576752186157666,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.256198347107438,
						"acc_stderr,none": 0.03984979653302871,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04330043749650741,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.294478527607362,
						"acc_stderr,none": 0.03581165790474082,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.04327040932578728,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.27350427350427353,
						"acc_stderr,none": 0.02920254015343118,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.015982814774695625,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.25722543352601157,
						"acc_stderr,none": 0.023532925431044283,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2335195530726257,
						"acc_stderr,none": 0.014149575348976259,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.024518195641879334,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2796910202767943,
						"acc_stderr,none": 0.04086818955473523,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2604501607717042,
						"acc_stderr,none": 0.024926723224845553,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.29012345679012347,
						"acc_stderr,none": 0.025251173936495022,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2529335071707953,
						"acc_stderr,none": 0.011102268713839989,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.02909720956841195,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.272875816993464,
						"acc_stderr,none": 0.018020474148393577,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.04350271442923243,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.1836734693877551,
						"acc_stderr,none": 0.024789071332007657,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.260318492037699,
						"acc_stderr,none": 0.04302811106279924,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22885572139303484,
						"acc_stderr,none": 0.02970528405677244,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26450999048525214,
						"acc_stderr,none": 0.05881482698027433,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.0355092018568963,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.03488647713457922,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3656581410412328,
						"acc_stderr,none": 0.013828148207503357,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.6535122336227308,
						"acc_stderr,none": 0.01337377341168565,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "RWKV/rwkv-6-world-3b"
	},
	"RWKV/rwkv-6-world-3b-v2.1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5597519729425028,
						"acc_norm,none": 0.5380496054114995,
						"acc_norm_stderr,none": 0.081325040666589,
						"acc_stderr,none": 0.10496551941589514,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35375,
						"acc_stderr,none": 0.01577628401219265,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.841044776119403,
						"acc_stderr,none": 0.14697750407730398,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.2868243826627526,
						"acc_norm,none": 0.2868243826627526,
						"acc_norm_stderr,none": 0.049648038192079365,
						"acc_stderr,none": 0.049648038192079365,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5978144354454502,
						"acc_stderr,none": 0.01682655066263625,
						"alias": "glue",
						"f1,none": 0.44793443052418713,
						"f1_stderr,none": 0.0004358142543351673,
						"mcc,none": 0.01769581142844805,
						"mcc_stderr,none": 0.03199086940323714
					},
					"lambada": {
						"acc,none": 0.6853289346012031,
						"acc_stderr,none": 0.016145050162523537,
						"alias": "lambada",
						"perplexity,none": 4.35049429687675,
						"perplexity_stderr,none": 0.2650173014168244
					},
					"lambada_multilingual": {
						"acc,none": 0.5096448670677275,
						"acc_stderr,none": 0.08355407022874216,
						"alias": "lambada_multilingual",
						"perplexity,none": 27.780662609583274,
						"perplexity_stderr,none": 10.701381871528621
					},
					"mmlu": {
						"acc,none": 0.2845036319612591,
						"acc_stderr,none": 0.04443329140354049,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2960680127523911,
						"acc_stderr,none": 0.03995770487948786,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.28838107499195365,
						"acc_stderr,none": 0.050820289583667876,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27331816704582385,
						"acc_stderr,none": 0.037323071068753,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2743418966064065,
						"acc_stderr,none": 0.04811731274082466,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.49064285714285716,
						"acc_stderr,none": 0.03494544151236283,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7333607762891676,
						"acc_norm,none": 0.5449173429966196,
						"acc_norm_stderr,none": 0.009123237467138025,
						"acc_stderr,none": 0.1432506657057485,
						"alias": "pythia",
						"bits_per_byte,none": 0.6681164850500348,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5889970926784391,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.8554275983682285,
						"perplexity_stderr,none": 0.0795803383257617,
						"word_perplexity,none": 11.898599962078784,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3021669907898528,
						"acc_stderr,none": 0.0013745202304622213,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.537576383691066,
						"bleu_diff_stderr,none": 0.7979385066435613,
						"bleu_max,none": 25.534289248490584,
						"bleu_max_stderr,none": 0.7738217621519946,
						"rouge1_acc,none": 0.28518971848225216,
						"rouge1_acc_stderr,none": 0.015805827874454895,
						"rouge1_diff,none": -10.311220318370816,
						"rouge1_diff_stderr,none": 0.8452398723460741,
						"rouge1_max,none": 50.985648682662436,
						"rouge1_max_stderr,none": 0.8328524416427945,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -12.872902047541107,
						"rouge2_diff_stderr,none": 1.0280775057763192,
						"rouge2_max,none": 34.3582780532621,
						"rouge2_max_stderr,none": 0.9761613335634386,
						"rougeL_acc,none": 0.2717258261933905,
						"rougeL_acc_stderr,none": 0.01557284045287583,
						"rougeL_diff,none": -10.895617033214727,
						"rougeL_diff_stderr,none": 0.85659291548201,
						"rougeL_max,none": 47.90125789226123,
						"rougeL_max_stderr,none": 0.8485916680169915
					},
					"xcopa": {
						"acc,none": 0.598909090909091,
						"acc_stderr,none": 0.06248804218701853,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44144578313253013,
						"acc_stderr,none": 0.04973010330498448,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6098309367667408,
						"acc_stderr,none": 0.061667967777701574,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7792762418521016,
						"acc_stderr,none": 0.045618169886461044,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5597519729425028,
						"acc_norm,none": 0.5380496054114995,
						"acc_norm_stderr,none": 0.081325040666589,
						"acc_stderr,none": 0.10496551941589514,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35375,
						"acc_stderr,none": 0.01577628401219265,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408943,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.349,
						"acc_stderr,none": 0.0150806639915631,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36583333333333334,
						"acc_stderr,none": 0.013910212062701172,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3378839590443686,
						"acc_norm,none": 0.36689419795221845,
						"acc_norm_stderr,none": 0.014084133118104296,
						"acc_stderr,none": 0.013822047922283523,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6691919191919192,
						"acc_norm,none": 0.6224747474747475,
						"acc_norm_stderr,none": 0.009947227833469428,
						"acc_stderr,none": 0.009654540125986131,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.841044776119403,
						"acc_stderr,none": 0.14697750407730398,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996695,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329824,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844881,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816324,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656604,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425682,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936713,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611484,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033845,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246447,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240662,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919294,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946567975,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397255,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659993,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264362,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731968,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269192885,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.01077776229836968,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426583,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.01518652793204012,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662758,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262003,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434923,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269192885,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406723,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323499,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.01364467578131413,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823335,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.01558903518560463,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066533,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296184,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426116,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973418,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.00909954953840022,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103305,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598121,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389651,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000124,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936703,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937594,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487923,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426574,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786567,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.015486634102858925,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397214,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265153,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632905,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.01094526376104296,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462123,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386708,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496501,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897908,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817146,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081332,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256578,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.396,
						"acc_stderr,none": 0.015473313265859408,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.337,
						"acc_stderr,none": 0.014955087918653605,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.2868243826627526,
						"acc_norm,none": 0.2868243826627526,
						"acc_norm_stderr,none": 0.049648038192079365,
						"acc_stderr,none": 0.049648038192079365,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3431952662721893,
						"acc_norm,none": 0.3431952662721893,
						"acc_norm_stderr,none": 0.03662976569681105,
						"acc_stderr,none": 0.03662976569681105,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2905405405405405,
						"acc_norm,none": 0.2905405405405405,
						"acc_norm_stderr,none": 0.03744626397928733,
						"acc_stderr,none": 0.03744626397928733,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.036554511504337694,
						"acc_stderr,none": 0.036554511504337694,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.036639749943912434,
						"acc_stderr,none": 0.036639749943912434,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.33014354066985646,
						"acc_norm,none": 0.33014354066985646,
						"acc_norm_stderr,none": 0.03260698244181309,
						"acc_stderr,none": 0.03260698244181309,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22794117647058823,
						"acc_norm,none": 0.22794117647058823,
						"acc_norm_stderr,none": 0.03610519574180446,
						"acc_stderr,none": 0.03610519574180446,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.28792569659442724,
						"acc_norm,none": 0.28792569659442724,
						"acc_norm_stderr,none": 0.025233351305786727,
						"acc_stderr,none": 0.025233351305786727,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108566,
						"acc_stderr,none": 0.03283472056108566,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3407821229050279,
						"acc_norm,none": 0.3407821229050279,
						"acc_norm_stderr,none": 0.03552572003977931,
						"acc_stderr,none": 0.03552572003977931,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.029443773022594693,
						"acc_stderr,none": 0.029443773022594693,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.03743138631255277,
						"acc_stderr,none": 0.03743138631255277,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.04589271111471628,
						"acc_stderr,none": 0.04589271111471628,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371222,
						"acc_stderr,none": 0.04198857662371222,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564398,
						"acc_stderr,none": 0.026350722655564398,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.30392156862745096,
						"acc_norm,none": 0.30392156862745096,
						"acc_norm_stderr,none": 0.03228210387037892,
						"acc_stderr,none": 0.03228210387037892,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.037115139596751764,
						"acc_stderr,none": 0.037115139596751764,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.34532374100719426,
						"acc_norm,none": 0.34532374100719426,
						"acc_norm_stderr,none": 0.04047501062151218,
						"acc_stderr,none": 0.04047501062151218,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.036522158784075054,
						"acc_stderr,none": 0.036522158784075054,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.03680350371286461,
						"acc_stderr,none": 0.03680350371286461,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.03566545538084812,
						"acc_stderr,none": 0.03566545538084812,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.02885890598472122,
						"acc_stderr,none": 0.02885890598472122,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29797979797979796,
						"acc_norm,none": 0.29797979797979796,
						"acc_norm_stderr,none": 0.032586303838365555,
						"acc_stderr,none": 0.032586303838365555,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.31932773109243695,
						"acc_norm,none": 0.31932773109243695,
						"acc_norm_stderr,none": 0.030283995525884396,
						"acc_stderr,none": 0.030283995525884396,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.27391304347826084,
						"acc_norm,none": 0.27391304347826084,
						"acc_norm_stderr,none": 0.029470189815005897,
						"acc_stderr,none": 0.029470189815005897,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.03915450630414251,
						"acc_stderr,none": 0.03915450630414251,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335356,
						"acc_stderr,none": 0.03388193526335356,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2751677852348993,
						"acc_norm,none": 0.2751677852348993,
						"acc_norm_stderr,none": 0.03671019403342563,
						"acc_stderr,none": 0.03671019403342563,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019636,
						"acc_stderr,none": 0.033341501981019636,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.03571791556468271,
						"acc_stderr,none": 0.03571791556468271,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653377,
						"acc_stderr,none": 0.033346454086653377,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.038950910157241364,
						"acc_stderr,none": 0.038950910157241364,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604675,
						"acc_stderr,none": 0.03893259610604675,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002353,
						"acc_stderr,none": 0.03273943999002353,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.03472469304477599,
						"acc_stderr,none": 0.03472469304477599,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.021989272196105032,
						"acc_stderr,none": 0.021989272196105032,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.03359314274571839,
						"acc_stderr,none": 0.03359314274571839,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.03972012975450535,
						"acc_stderr,none": 0.03972012975450535,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798663,
						"acc_stderr,none": 0.03957756102798663,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03041268445992877,
						"acc_stderr,none": 0.03041268445992877,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3277777777777778,
						"acc_norm,none": 0.3277777777777778,
						"acc_norm_stderr,none": 0.03508485373860691,
						"acc_stderr,none": 0.03508485373860691,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495764,
						"acc_stderr,none": 0.040832215386495764,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.038552896163789485,
						"acc_stderr,none": 0.038552896163789485,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.21714285714285714,
						"acc_norm,none": 0.21714285714285714,
						"acc_norm_stderr,none": 0.03125643260090664,
						"acc_stderr,none": 0.03125643260090664,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24170616113744076,
						"acc_norm,none": 0.24170616113744076,
						"acc_norm_stderr,none": 0.029542889951620098,
						"acc_stderr,none": 0.029542889951620098,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.30603448275862066,
						"acc_norm,none": 0.30603448275862066,
						"acc_norm_stderr,none": 0.030321322353578664,
						"acc_stderr,none": 0.030321322353578664,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.26436781609195403,
						"acc_norm,none": 0.26436781609195403,
						"acc_norm_stderr,none": 0.03352830517660786,
						"acc_stderr,none": 0.03352830517660786,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.03972552884785137,
						"acc_stderr,none": 0.03972552884785137,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580983,
						"acc_stderr,none": 0.030576185297580983,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.033695536918777164,
						"acc_stderr,none": 0.033695536918777164,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456226,
						"acc_stderr,none": 0.03579526516456226,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999998,
						"acc_stderr,none": 0.03749999999999998,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.01769581142844805,
						"mcc_stderr,none": 0.03199086940323714
					},
					"copa": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.038612291966536955,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5978144354454502,
						"acc_stderr,none": 0.01682655066263625,
						"alias": "glue",
						"f1,none": 0.44793443052418713,
						"f1_stderr,none": 0.0004358142543351673,
						"mcc,none": 0.01769581142844805,
						"mcc_stderr,none": 0.03199086940323714
					},
					"hellaswag": {
						"acc,none": 0.507468631746664,
						"acc_norm,none": 0.6845249950209121,
						"acc_norm_stderr,none": 0.004637550478007341,
						"acc_stderr,none": 0.00498922471578454,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6853289346012031,
						"acc_stderr,none": 0.016145050162523537,
						"alias": "lambada",
						"perplexity,none": 4.35049429687675,
						"perplexity_stderr,none": 0.2650173014168244
					},
					"lambada_multilingual": {
						"acc,none": 0.5096448670677275,
						"acc_stderr,none": 0.08355407022874216,
						"alias": "lambada_multilingual",
						"perplexity,none": 27.780662609583274,
						"perplexity_stderr,none": 10.701381871528621
					},
					"lambada_openai": {
						"acc,none": 0.7151174073355328,
						"acc_stderr,none": 0.006288306538252608,
						"alias": " - lambada_openai",
						"perplexity,none": 3.8554275983682285,
						"perplexity_stderr,none": 0.0795803383257617
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.402483989908791,
						"acc_stderr,none": 0.00683220912137788,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 44.0912035604136,
						"perplexity_stderr,none": 2.5517158145635306
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7147292839122841,
						"acc_stderr,none": 0.006290880813622302,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.855681874746945,
						"perplexity_stderr,none": 0.07958862145533797
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4292645061129439,
						"acc_stderr,none": 0.006895916655437441,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 38.28626473533178,
						"perplexity_stderr,none": 1.9638273042895473
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5167863380555017,
						"acc_stderr,none": 0.006962050788037521,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 22.468985934710336,
						"perplexity_stderr,none": 1.1538382248811132
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.48496021734911704,
						"acc_stderr,none": 0.0069628256045532475,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 30.201176942713705,
						"perplexity_stderr,none": 1.6776463014832912
					},
					"lambada_standard": {
						"acc,none": 0.6557345235784979,
						"acc_stderr,none": 0.0066194641433124325,
						"alias": " - lambada_standard",
						"perplexity,none": 4.845228190431887,
						"perplexity_stderr,none": 0.1083522112848713
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.01788624973410439,
						"acc_stderr,none": 0.0165552524979259,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2845036319612591,
						"acc_stderr,none": 0.04443329140354049,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.04094376269996793,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.03782728980865469,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.23773584905660378,
						"acc_stderr,none": 0.02619980880756193,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03745554791462456,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653696,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.03345036916788992,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.251063829787234,
						"acc_stderr,none": 0.028346963777162462,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281335,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.0387835237213862,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2566137566137566,
						"acc_stderr,none": 0.022494510767503154,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604673,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.29354838709677417,
						"acc_stderr,none": 0.025906087021319295,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3054187192118227,
						"acc_stderr,none": 0.03240661565868408,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3696969696969697,
						"acc_stderr,none": 0.03769430314512568,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.29292929292929293,
						"acc_stderr,none": 0.03242497958178818,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3005181347150259,
						"acc_stderr,none": 0.03308818594415751,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23846153846153847,
						"acc_stderr,none": 0.02160629449464773,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.0263357394040558,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.027553614467863814,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2913907284768212,
						"acc_stderr,none": 0.037101857261199946,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26972477064220185,
						"acc_stderr,none": 0.01902848671111545,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.028353212866863438,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.0319800166011507,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.02860951671699494,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.22869955156950672,
						"acc_stderr,none": 0.028188240046929193,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3053435114503817,
						"acc_stderr,none": 0.040393149787245605,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2960680127523911,
						"acc_stderr,none": 0.03995770487948786,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.38016528925619836,
						"acc_stderr,none": 0.04431324501968432,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.043300437496507416,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3006134969325153,
						"acc_stderr,none": 0.03602511318806771,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285713,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.22330097087378642,
						"acc_stderr,none": 0.04123553189891431,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.33760683760683763,
						"acc_stderr,none": 0.030980296992618558,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3537675606641124,
						"acc_stderr,none": 0.017098184708161903,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3092485549132948,
						"acc_stderr,none": 0.024883140570071755,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303661,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.02617390850671858,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.28838107499195365,
						"acc_stderr,none": 0.050820289583667876,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3440514469453376,
						"acc_stderr,none": 0.026981478043648026,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02622964917882116,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2801418439716312,
						"acc_stderr,none": 0.02678917235114024,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.303129074315515,
						"acc_stderr,none": 0.011738669951254293,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22426470588235295,
						"acc_stderr,none": 0.025336848563332355,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.018492596536396955,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.04554619617541054,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22040816326530613,
						"acc_stderr,none": 0.02653704531214529,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.27331816704582385,
						"acc_stderr,none": 0.037323071068753,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3034825870646766,
						"acc_stderr,none": 0.03251006816458618,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2743418966064065,
						"acc_stderr,none": 0.04811731274082466,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.24096385542168675,
						"acc_stderr,none": 0.033293941190735296,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.035993357714560276,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.36923076923076925,
						"acc_stderr,none": 0.004871482713047626,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3768307567127746,
						"acc_stderr,none": 0.004887393741867376,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6078431372549019,
						"acc_stderr,none": 0.024200729868327484,
						"alias": " - mrpc",
						"f1,none": 0.6680497925311203,
						"f1_stderr,none": 0.024845895131345674
					},
					"openbookqa": {
						"acc,none": 0.284,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.021930844120728505,
						"acc_stderr,none": 0.020186703693570847,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4505,
						"acc_stderr,none": 0.011128198119942876,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4475,
						"acc_stderr,none": 0.011121318125943089,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.011107641056719623,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.539,
						"acc_stderr,none": 0.011149065020234335,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.01112930504188632,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5095,
						"acc_stderr,none": 0.011181117282805228,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.497,
						"acc_stderr,none": 0.011182934722804556,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.49064285714285716,
						"acc_stderr,none": 0.03494544151236283,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7606093579978237,
						"acc_norm,none": 0.7676822633297062,
						"acc_norm_stderr,none": 0.009853201384168241,
						"acc_stderr,none": 0.009955884250291697,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7333607762891676,
						"acc_norm,none": 0.5449173429966196,
						"acc_norm_stderr,none": 0.009123237467138025,
						"acc_stderr,none": 0.1432506657057485,
						"alias": "pythia",
						"bits_per_byte,none": 0.6681164850500348,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5889970926784391,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.8554275983682285,
						"perplexity_stderr,none": 0.0795803383257617,
						"word_perplexity,none": 11.898599962078784,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5306608090792605,
						"acc_stderr,none": 0.006752678446394558,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.7096957704674747,
						"acc_stderr,none": 0.00225744038643843,
						"alias": " - qqp",
						"f1,none": 0.4460282248548638,
						"f1_stderr,none": 0.00427363244171067
					},
					"record": {
						"alias": "record",
						"em,none": 0.2548,
						"em_stderr,none": 0.004357705240088591,
						"f1,none": 0.26359857164621353,
						"f1_stderr,none": 0.004370385835600856
					},
					"rte": {
						"acc,none": 0.5740072202166066,
						"acc_stderr,none": 0.02976495674177765,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.924,
						"acc_norm,none": 0.889,
						"acc_norm_stderr,none": 0.009938701010583726,
						"acc_stderr,none": 0.008384169266796393,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9139908256880734,
						"acc_stderr,none": 0.009500232412777832,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3021669907898528,
						"acc_stderr,none": 0.0013745202304622213,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.537576383691066,
						"bleu_diff_stderr,none": 0.7979385066435613,
						"bleu_max,none": 25.534289248490584,
						"bleu_max_stderr,none": 0.7738217621519946,
						"rouge1_acc,none": 0.28518971848225216,
						"rouge1_acc_stderr,none": 0.015805827874454895,
						"rouge1_diff,none": -10.311220318370816,
						"rouge1_diff_stderr,none": 0.8452398723460741,
						"rouge1_max,none": 50.985648682662436,
						"rouge1_max_stderr,none": 0.8328524416427945,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -12.872902047541107,
						"rouge2_diff_stderr,none": 1.0280775057763192,
						"rouge2_max,none": 34.3582780532621,
						"rouge2_max_stderr,none": 0.9761613335634386,
						"rougeL_acc,none": 0.2717258261933905,
						"rougeL_acc_stderr,none": 0.01557284045287583,
						"rougeL_diff,none": -10.895617033214727,
						"rougeL_diff_stderr,none": 0.85659291548201,
						"rougeL_max,none": 47.90125789226123,
						"rougeL_max_stderr,none": 0.8485916680169915
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713627,
						"bleu_diff,none": -8.537576383691066,
						"bleu_diff_stderr,none": 0.7979385066435613,
						"bleu_max,none": 25.534289248490584,
						"bleu_max_stderr,none": 0.7738217621519946,
						"rouge1_acc,none": 0.28518971848225216,
						"rouge1_acc_stderr,none": 0.015805827874454895,
						"rouge1_diff,none": -10.311220318370816,
						"rouge1_diff_stderr,none": 0.8452398723460741,
						"rouge1_max,none": 50.985648682662436,
						"rouge1_max_stderr,none": 0.8328524416427945,
						"rouge2_acc,none": 0.22888616891064872,
						"rouge2_acc_stderr,none": 0.014706994909055027,
						"rouge2_diff,none": -12.872902047541107,
						"rouge2_diff_stderr,none": 1.0280775057763192,
						"rouge2_max,none": 34.3582780532621,
						"rouge2_max_stderr,none": 0.9761613335634386,
						"rougeL_acc,none": 0.2717258261933905,
						"rougeL_acc_stderr,none": 0.01557284045287583,
						"rougeL_diff,none": -10.895617033214727,
						"rougeL_diff_stderr,none": 0.85659291548201,
						"rougeL_max,none": 47.90125789226123,
						"rougeL_max_stderr,none": 0.8485916680169915
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23378212974296206,
						"acc_stderr,none": 0.014816195991931584,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3705518518367436,
						"acc_stderr,none": 0.013787499855765842,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6681164850500348,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5889970926784391,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.898599962078784,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6527229676400947,
						"acc_stderr,none": 0.013380909249751237,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4084507042253521,
						"acc_stderr,none": 0.05875113694257525,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5673076923076923,
						"acc_stderr,none": 0.04881803687006195,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.598909090909091,
						"acc_stderr,none": 0.06248804218701853,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.022198954641476802,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628053,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724802,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269945,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.02111349234774373,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.021049612166134796,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44144578313253013,
						"acc_stderr,none": 0.04973010330498448,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3393574297188755,
						"acc_stderr,none": 0.009490727635646755,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46224899598393576,
						"acc_stderr,none": 0.009993466360872783,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438302,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.00991440882858341,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5538152610441767,
						"acc_stderr,none": 0.00996385427413915,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.47670682730923697,
						"acc_stderr,none": 0.010011191570021302,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5116465863453815,
						"acc_stderr,none": 0.010019353650807701,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994481,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4799196787148594,
						"acc_stderr,none": 0.01001398741923408,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.37991967871485943,
						"acc_stderr,none": 0.009728758452987863,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42048192771084336,
						"acc_stderr,none": 0.00989451955110577,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4562248995983936,
						"acc_stderr,none": 0.009983589197693925,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467436,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42931726907630524,
						"acc_stderr,none": 0.009921425969589916,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998446,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6098309367667408,
						"acc_stderr,none": 0.061667967777701574,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5777630708140304,
						"acc_stderr,none": 0.012710555263676445,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7643944407677035,
						"acc_stderr,none": 0.0109210162449263,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6757114493712773,
						"acc_stderr,none": 0.01204641922999533,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5473196558570483,
						"acc_stderr,none": 0.012809372866181962,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5804103242885507,
						"acc_stderr,none": 0.012699642268200759,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6525479814692257,
						"acc_stderr,none": 0.01225364152793529,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5109199205823958,
						"acc_stderr,none": 0.012864056278255032,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6578424884182661,
						"acc_stderr,none": 0.01220915270747284,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5301125082726671,
						"acc_stderr,none": 0.012843769248432167,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5777630708140304,
						"acc_stderr,none": 0.012710555263676445,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6333553937789543,
						"acc_stderr,none": 0.012401034429990701,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7792762418521016,
						"acc_stderr,none": 0.045618169886461044,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8554838709677419,
						"acc_stderr,none": 0.007293668342043698,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7951807228915663,
						"acc_stderr,none": 0.044566795694258816,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6725755995828988,
						"acc_stderr,none": 0.01516154012579233,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7414448669201521,
						"acc_stderr,none": 0.02704988156186834,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6476190476190476,
						"acc_stderr,none": 0.026958839632509345,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7301587301587301,
						"acc_stderr,none": 0.01979150564482921,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/rwkv-6-world-3b-v2.1"
	},
	"RWKV/rwkv-raven-7b": {
		"config": {
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.23593505198689646,
						"acc_stderr,none": 0.04009745711689859,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.240807651434644,
						"acc_stderr,none": 0.03170227792277657,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2520115867396202,
						"acc_stderr,none": 0.04198814734226423,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22229444263893403,
						"acc_stderr,none": 0.0342841954712098,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2261338407865525,
						"acc_stderr,none": 0.050060158709281405,
						"alias": " - stem"
					}
				},
				"results": {
					"mmlu": {
						"acc,none": 0.23593505198689646,
						"acc_stderr,none": 0.04009745711689859,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.0335567721631314,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19736842105263158,
						"acc_stderr,none": 0.03238981601699397,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.21509433962264152,
						"acc_stderr,none": 0.025288394502891366,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036847,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.038612291966536955,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.20809248554913296,
						"acc_stderr,none": 0.030952890217749884,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.26382978723404255,
						"acc_stderr,none": 0.02880998985410297,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281335,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135302,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24867724867724866,
						"acc_stderr,none": 0.022261817692400175,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.19047619047619047,
						"acc_stderr,none": 0.03512207412302052,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.038612291966536934,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.02275520495954294,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.13793103448275862,
						"acc_stderr,none": 0.024261984301044582,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.22424242424242424,
						"acc_stderr,none": 0.03256866661681102,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.17676767676767677,
						"acc_stderr,none": 0.027178752639044915,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.02977866303775295,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2076923076923077,
						"acc_stderr,none": 0.020567539567246797,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.02592887613276612,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23109243697478993,
						"acc_stderr,none": 0.02738140692786896,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.1986754966887417,
						"acc_stderr,none": 0.032578473844367774,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.1944954128440367,
						"acc_stderr,none": 0.016970289090458054,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.027467401804057986,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.030587591351604246,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.27848101265822783,
						"acc_stderr,none": 0.02917868230484256,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3183856502242152,
						"acc_stderr,none": 0.03126580522513713,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.25190839694656486,
						"acc_stderr,none": 0.03807387116306086,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.240807651434644,
						"acc_stderr,none": 0.03170227792277657,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2396694214876033,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052191,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22085889570552147,
						"acc_stderr,none": 0.032591773927421776,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291518,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.1941747572815534,
						"acc_stderr,none": 0.039166677628225836,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2692307692307692,
						"acc_stderr,none": 0.029058588303748845,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.24521072796934865,
						"acc_stderr,none": 0.015384352284543946,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24566473988439305,
						"acc_stderr,none": 0.023176298203992012,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.22875816993464052,
						"acc_stderr,none": 0.024051029739912258,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2520115867396202,
						"acc_stderr,none": 0.04198814734226423,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.1832797427652733,
						"acc_stderr,none": 0.02197419884826582,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.022779719088733393,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.23049645390070922,
						"acc_stderr,none": 0.025123739226872405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24511082138200782,
						"acc_stderr,none": 0.010986307870045516,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3088235294117647,
						"acc_stderr,none": 0.02806499816704009,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.01755581809132227,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.20909090909090908,
						"acc_stderr,none": 0.038950910157241364,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866767,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22229444263893403,
						"acc_stderr,none": 0.0342841954712098,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23383084577114427,
						"acc_stderr,none": 0.02992941540834839,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2261338407865525,
						"acc_stderr,none": 0.050060158709281405,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2710843373493976,
						"acc_stderr,none": 0.03460579907553027,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.035993357714560276,
						"alias": "  - world_religions"
					}
				}
			}
		},
		"name": "RWKV/rwkv-raven-7b"
	},
	"RWKV/v5-Eagle-7B-HF": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.10866313811862532,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961653,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.245,
						"acc_stderr,none": 0.16240947863716776,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8389552238805972,
						"acc_stderr,none": 0.14993373109834782,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2882615156017832,
						"acc_norm,none": 0.2882615156017832,
						"acc_norm_stderr,none": 0.13179977762218675,
						"acc_stderr,none": 0.13179977762218675,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.30348817130029343,
						"acc_norm,none": 0.30348817130029343,
						"acc_norm_stderr,none": 0.05727621106572747,
						"acc_stderr,none": 0.05727621106572747,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4122409809183063,
						"likelihood_diff_stderr,none": 0.5262720974728141,
						"pct_stereotype,none": 0.6238819320214669,
						"pct_stereotype_stderr,none": 0.073363024319605
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.57521747059944,
						"acc_stderr,none": 0.11511626491771801,
						"alias": "glue",
						"f1,none": 0.6884135263660871,
						"f1_stderr,none": 0.00019993767951205328,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.0009501393580116212
					},
					"kmmlu": {
						"acc,none": 0.1267686976609876,
						"acc_norm,none": 0.1267686976609876,
						"acc_norm_stderr,none": 0.05300410273182537,
						"acc_stderr,none": 0.05300410273182537,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5354089015566762,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04729088513119564,
						"alias": "kobest",
						"f1,none": 0.45202421669965,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7141470987774112,
						"acc_stderr,none": 0.015852858101128752,
						"alias": "lambada",
						"perplexity,none": 3.8043475336263994,
						"perplexity_stderr,none": 0.2281685057981299
					},
					"lambada_cloze": {
						"acc,none": 0.07374345041723268,
						"acc_stderr,none": 0.004172041356985056,
						"alias": "lambada_cloze",
						"perplexity,none": 374.82042882342057,
						"perplexity_stderr,none": 119.54818849469166
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"mmlu": {
						"acc,none": 0.3321464178891896,
						"acc_stderr,none": 0.06091173753049962,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3539161520961976,
						"acc_stderr,none": 0.048395331555541835,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28322232794164287,
						"acc_stderr,none": 0.05862955260080505,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3567068843151171,
						"acc_norm,none": 0.32404988846927757,
						"acc_norm_stderr,none": 0.00010850708035438849,
						"acc_stderr,none": 0.09804760905447438,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4555,
						"acc_stderr,none": 0.05501642166188383,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.743687308145615,
						"acc_norm,none": 0.6277400828170847,
						"acc_norm_stderr,none": 0.010306063670327702,
						"acc_stderr,none": 0.14382358537776974,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3738584556634206,
						"perplexity_stderr,none": 0.06613091584551405,
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.04969392170788078,
						"acc_stderr,none": 0.03996448391177798,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6461681807593758,
						"acc_stderr,none": 0.07201369718907223,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3302969599916217,
						"acc_stderr,none": 0.001652651966553619,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07056280911012106,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43991967871485943,
						"acc_stderr,none": 0.04533624542720319,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6331748992238734,
						"acc_stderr,none": 0.053129590825981766,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8042256686895932,
						"acc_stderr,none": 0.03648190721834694,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.10866313811862532,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961653,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797577,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055237,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35333333333333333,
						"acc_stderr,none": 0.013804572162314933,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39505119453924914,
						"acc_norm,none": 0.4308873720136519,
						"acc_norm_stderr,none": 0.014471133392642473,
						"acc_stderr,none": 0.014285898292938169,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7382154882154882,
						"acc_norm,none": 0.7188552188552189,
						"acc_norm_stderr,none": 0.009224735470286998,
						"acc_stderr,none": 0.009020523527210177,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.245,
						"acc_stderr,none": 0.16240947863716776,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0095,
						"acc_stderr,none": 0.0021696148539100363,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.4795,
						"acc_stderr,none": 0.011173732641806813,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0575,
						"acc_stderr,none": 0.005206767732010568,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.5955,
						"acc_stderr,none": 0.010977254896490816,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.2735,
						"acc_stderr,none": 0.00996988336376831,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.4115,
						"acc_stderr,none": 0.011006563824537309,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.1015,
						"acc_stderr,none": 0.006754382713684517,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.2985,
						"acc_stderr,none": 0.010234805842091585,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0945,
						"acc_stderr,none": 0.006542650696703085,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.1285,
						"acc_stderr,none": 0.0074847769467749,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0004338394793926247,
						"acc_stderr,none": 0.00043383947939263187,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8389552238805972,
						"acc_stderr,none": 0.14993373109834782,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030006,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000059,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454266,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651514,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081498,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.01310617304066178,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042953,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469304,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565656,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633707,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389629,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452375,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611495,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333371,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096925,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613606,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816325,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704166,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454269,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319421,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.01565899754787024,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992438,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.685,
						"acc_stderr,none": 0.01469663196079251,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341673,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792939,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240663,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024954,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.015275252316519364,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.01035486471293673,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042755,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342758,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280311,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357788,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662732,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695804,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179479,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140924,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578247,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796394,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366664,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01578686875935899,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.0057338361396954505,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491134,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.00371723254825656,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568198,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.475,
						"acc_stderr,none": 0.015799513429996012,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160328,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426116,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171745,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855757,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938574,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423525,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286411,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.00453647215130651,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.00556839357508138,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498329,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6819571865443425,
						"acc_stderr,none": 0.00814542760718583,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.16071428571428573,
						"acc_stderr,none": 0.04952230059306299,
						"alias": "cb",
						"f1,none": 0.1572449642625081,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2882615156017832,
						"acc_norm,none": 0.2882615156017832,
						"acc_norm_stderr,none": 0.13179977762218675,
						"acc_stderr,none": 0.13179977762218675,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.3469387755102041,
						"acc_norm,none": 0.3469387755102041,
						"acc_norm_stderr,none": 0.06870411522695292,
						"acc_stderr,none": 0.06870411522695292,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5454545454545454,
						"acc_norm,none": 0.5454545454545454,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.0687296045180637,
						"acc_stderr,none": 0.0687296045180637,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518174,
						"acc_stderr,none": 0.05443310539518174,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.5517241379310345,
						"acc_norm,none": 0.5517241379310345,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.07624928516630235,
						"acc_stderr,none": 0.07624928516630235,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.06712194885164874,
						"acc_stderr,none": 0.06712194885164874,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.30348817130029343,
						"acc_norm,none": 0.30348817130029343,
						"acc_norm_stderr,none": 0.05727621106572747,
						"acc_stderr,none": 0.05727621106572747,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.03456905430376244,
						"acc_stderr,none": 0.03456905430376244,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.032420416133953835,
						"acc_stderr,none": 0.032420416133953835,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.0391255387569151,
						"acc_stderr,none": 0.0391255387569151,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745934,
						"acc_stderr,none": 0.03283906353745934,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245474,
						"acc_stderr,none": 0.03244189290245474,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.0418644516301375,
						"acc_stderr,none": 0.0418644516301375,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.040263772107873096,
						"acc_stderr,none": 0.040263772107873096,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.34365325077399383,
						"acc_norm,none": 0.34365325077399383,
						"acc_norm_stderr,none": 0.02646664923557931,
						"acc_stderr,none": 0.02646664923557931,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460389,
						"acc_stderr,none": 0.03256685484460389,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3240223463687151,
						"acc_norm,none": 0.3240223463687151,
						"acc_norm_stderr,none": 0.03507871288800094,
						"acc_stderr,none": 0.03507871288800094,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.02944377302259469,
						"acc_stderr,none": 0.02944377302259469,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852894,
						"acc_stderr,none": 0.04730439022852894,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.04133119440243838,
						"acc_stderr,none": 0.04133119440243838,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.042857142857142844,
						"acc_stderr,none": 0.042857142857142844,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.02622311550050611,
						"acc_stderr,none": 0.02622311550050611,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3480392156862745,
						"acc_norm,none": 0.3480392156862745,
						"acc_norm_stderr,none": 0.033433112404884176,
						"acc_stderr,none": 0.033433112404884176,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.30994152046783624,
						"acc_norm,none": 0.30994152046783624,
						"acc_norm_stderr,none": 0.035469769593931624,
						"acc_stderr,none": 0.035469769593931624,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.039089144792915614,
						"acc_stderr,none": 0.039089144792915614,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.03767609312195345,
						"acc_stderr,none": 0.03767609312195345,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.03680350371286461,
						"acc_stderr,none": 0.03680350371286461,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.028746730632681374,
						"acc_stderr,none": 0.028746730632681374,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03448901746724546,
						"acc_stderr,none": 0.03448901746724546,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.33557046979865773,
						"acc_norm,none": 0.33557046979865773,
						"acc_norm_stderr,none": 0.03881373830315734,
						"acc_stderr,none": 0.03881373830315734,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261343,
						"acc_stderr,none": 0.04319782230261343,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814535,
						"acc_stderr,none": 0.03470398212814535,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3492063492063492,
						"acc_norm,none": 0.3492063492063492,
						"acc_norm_stderr,none": 0.042639068927951315,
						"acc_stderr,none": 0.042639068927951315,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3430232558139535,
						"acc_norm,none": 0.3430232558139535,
						"acc_norm_stderr,none": 0.03630268317574835,
						"acc_stderr,none": 0.03630268317574835,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995072,
						"acc_stderr,none": 0.022050254355995072,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.03359314274571839,
						"acc_stderr,none": 0.03359314274571839,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.042676068742999555,
						"acc_stderr,none": 0.042676068742999555,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630124,
						"acc_stderr,none": 0.03260773253630124,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732732,
						"acc_stderr,none": 0.03460236918732732,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3386243386243386,
						"acc_norm,none": 0.3386243386243386,
						"acc_norm_stderr,none": 0.03451471285997055,
						"acc_stderr,none": 0.03451471285997055,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.04761904761904763,
						"acc_stderr,none": 0.04761904761904763,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.03444952656229018,
						"acc_stderr,none": 0.03444952656229018,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846655,
						"acc_stderr,none": 0.030469670650846655,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24202127659574468,
						"acc_norm,none": 0.24202127659574468,
						"acc_norm_stderr,none": 0.022117683921586972,
						"acc_stderr,none": 0.022117683921586972,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.03162106740099062,
						"acc_stderr,none": 0.03162106740099062,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.03653923615465969,
						"acc_stderr,none": 0.03653923615465969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.336283185840708,
						"acc_norm,none": 0.336283185840708,
						"acc_norm_stderr,none": 0.03149580605318969,
						"acc_stderr,none": 0.03149580605318969,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.035886248000917075,
						"acc_stderr,none": 0.035886248000917075,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002355,
						"acc_stderr,none": 0.03273943999002355,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456225,
						"acc_stderr,none": 0.03579526516456225,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3167701863354037,
						"acc_norm,none": 0.3167701863354037,
						"acc_norm_stderr,none": 0.036778631311574536,
						"acc_stderr,none": 0.036778631311574536,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030824330617413596
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.032659863237109066,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4122409809183063,
						"likelihood_diff_stderr,none": 0.5262720974728141,
						"pct_stereotype,none": 0.6238819320214669,
						"pct_stereotype_stderr,none": 0.073363024319605
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6393858079904593,
						"likelihood_diff_stderr,none": 0.08821435868817994,
						"pct_stereotype,none": 0.6416219439475254,
						"pct_stereotype_stderr,none": 0.011713139129932815
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.1401098901098905,
						"likelihood_diff_stderr,none": 0.4115891874265934,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.159090909090909,
						"likelihood_diff_stderr,none": 1.8492236476893629,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.107692307692307,
						"likelihood_diff_stderr,none": 0.6397530236059988,
						"pct_stereotype,none": 0.7846153846153846,
						"pct_stereotype_stderr,none": 0.051386112368797664
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.4609375,
						"likelihood_diff_stderr,none": 0.16280657326200262,
						"pct_stereotype,none": 0.621875,
						"pct_stereotype_stderr,none": 0.027150254412347145
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.596064814814815,
						"likelihood_diff_stderr,none": 0.24084526516996982,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.033674621388960775
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7881944444444446,
						"likelihood_diff_stderr,none": 0.3413666566950625,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3206200787401574,
						"likelihood_diff_stderr,none": 0.14970005173800094,
						"pct_stereotype,none": 0.5374015748031497,
						"pct_stereotype_stderr,none": 0.022143566088969842
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.8524774774774775,
						"likelihood_diff_stderr,none": 0.3460250110133974,
						"pct_stereotype,none": 0.7837837837837838,
						"pct_stereotype_stderr,none": 0.03925056618715647
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.846774193548387,
						"likelihood_diff_stderr,none": 0.40461832947857274,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.429605263157895,
						"likelihood_diff_stderr,none": 0.2601888034894752,
						"pct_stereotype,none": 0.6842105263157895,
						"pct_stereotype_stderr,none": 0.033811372338927476
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1879099582587953,
						"likelihood_diff_stderr,none": 0.07307632319346823,
						"pct_stereotype,none": 0.6064400715563506,
						"pct_stereotype_stderr,none": 0.011933349890055874
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.1625,
						"likelihood_diff_stderr,none": 0.2812023992177924,
						"pct_stereotype,none": 0.6444444444444445,
						"pct_stereotype_stderr,none": 0.05074011803597718
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.2596153846153846,
						"likelihood_diff_stderr,none": 0.5111852935475065,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.981060606060606,
						"likelihood_diff_stderr,none": 0.4748730233320052,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.05524032911365453
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.718068535825545,
						"likelihood_diff_stderr,none": 0.13777620949484298,
						"pct_stereotype,none": 0.5981308411214953,
						"pct_stereotype_stderr,none": 0.027407249156290024
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.5533596837944663,
						"likelihood_diff_stderr,none": 0.192814309944906,
						"pct_stereotype,none": 0.4426877470355731,
						"pct_stereotype_stderr,none": 0.031289438964526774
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.467013888888889,
						"likelihood_diff_stderr,none": 0.4108900705011094,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.055335047518872166
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8407608695652176,
						"likelihood_diff_stderr,none": 0.1347563350587834,
						"pct_stereotype,none": 0.5195652173913043,
						"pct_stereotype_stderr,none": 0.023320127087608274
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.0597826086956523,
						"likelihood_diff_stderr,none": 0.23918416981725277,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.4065934065934065,
						"likelihood_diff_stderr,none": 0.326805316310411,
						"pct_stereotype,none": 0.8351648351648352,
						"pct_stereotype_stderr,none": 0.039110176747367435
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6278698979591835,
						"likelihood_diff_stderr,none": 0.2441334388693202,
						"pct_stereotype,none": 0.7397959183673469,
						"pct_stereotype_stderr,none": 0.031419242636774605
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.57521747059944,
						"acc_stderr,none": 0.11511626491771801,
						"alias": "glue",
						"f1,none": 0.6884135263660871,
						"f1_stderr,none": 0.00019993767951205328,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.0009501393580116212
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.08718726307808947,
						"exact_match_stderr,get-answer": 0.007770691416783575
					},
					"hellaswag": {
						"acc,none": 0.5263891655048795,
						"acc_norm,none": 0.7087233618801035,
						"acc_norm_stderr,none": 0.004534221350046123,
						"acc_stderr,none": 0.004982826916687147,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.1267686976609876,
						"acc_norm,none": 0.1267686976609876,
						"acc_norm_stderr,none": 0.05300410273182537,
						"acc_stderr,none": 0.05300410273182537,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.038612291966536955,
						"acc_stderr,none": 0.038612291966536955,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.114,
						"acc_norm,none": 0.114,
						"acc_norm_stderr,none": 0.010055103435823333,
						"acc_stderr,none": 0.010055103435823333,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.095,
						"acc_norm,none": 0.095,
						"acc_norm_stderr,none": 0.009276910103103329,
						"acc_stderr,none": 0.009276910103103329,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.01251081614126436,
						"acc_stderr,none": 0.01251081614126436,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499356,
						"acc_stderr,none": 0.012864077288499356,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17333333333333334,
						"acc_norm,none": 0.17333333333333334,
						"acc_norm_stderr,none": 0.015466528504746212,
						"acc_stderr,none": 0.015466528504746212,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.007395315455792939,
						"acc_stderr,none": 0.007395315455792939,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.00989939381972443,
						"acc_stderr,none": 0.00989939381972443,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.051,
						"acc_norm,none": 0.051,
						"acc_norm_stderr,none": 0.006960420062571401,
						"acc_stderr,none": 0.006960420062571401,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.24615384615384617,
						"acc_norm,none": 0.24615384615384617,
						"acc_norm_stderr,none": 0.03792711596479614,
						"acc_stderr,none": 0.03792711596479614,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.045,
						"acc_norm,none": 0.045,
						"acc_norm_stderr,none": 0.006558812241406125,
						"acc_stderr,none": 0.006558812241406125,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968118,
						"acc_stderr,none": 0.012583693787968118,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.054,
						"acc_norm,none": 0.054,
						"acc_norm_stderr,none": 0.007150883521295444,
						"acc_stderr,none": 0.007150883521295444,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341673,
						"acc_stderr,none": 0.011328165223341673,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.123,
						"acc_norm,none": 0.123,
						"acc_norm_stderr,none": 0.010391293421849879,
						"acc_stderr,none": 0.010391293421849879,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.101,
						"acc_norm,none": 0.101,
						"acc_norm_stderr,none": 0.009533618929340992,
						"acc_stderr,none": 0.009533618929340992,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.103,
						"acc_norm,none": 0.103,
						"acc_norm_stderr,none": 0.009616833339695784,
						"acc_stderr,none": 0.009616833339695784,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768077,
						"acc_stderr,none": 0.04408440022768077,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.065,
						"acc_norm,none": 0.065,
						"acc_norm_stderr,none": 0.007799733061832024,
						"acc_stderr,none": 0.007799733061832024,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.111,
						"acc_norm,none": 0.111,
						"acc_norm_stderr,none": 0.009938701010583726,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.085,
						"acc_norm,none": 0.085,
						"acc_norm_stderr,none": 0.00882342636694231,
						"acc_stderr,none": 0.00882342636694231,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660009,
						"acc_stderr,none": 0.013394902889660009,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.101,
						"acc_norm,none": 0.101,
						"acc_norm_stderr,none": 0.009533618929341,
						"acc_stderr,none": 0.009533618929341,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.192,
						"acc_norm,none": 0.192,
						"acc_norm_stderr,none": 0.012461592646659985,
						"acc_stderr,none": 0.012461592646659985,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.15333333333333332,
						"acc_norm,none": 0.15333333333333332,
						"acc_norm_stderr,none": 0.014721806604031804,
						"acc_stderr,none": 0.014721806604031804,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.010978183844357791,
						"acc_stderr,none": 0.010978183844357791,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.123,
						"acc_norm,none": 0.123,
						"acc_norm_stderr,none": 0.010391293421849877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.008963053962592072,
						"acc_stderr,none": 0.008963053962592072,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847169,
						"acc_stderr,none": 0.009779910359847169,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.024698855131686855,
						"acc_stderr,none": 0.024698855131686855,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333332,
						"acc_stderr,none": 0.010878848714333332,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.028617649261360192,
						"acc_stderr,none": 0.028617649261360192,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.145,
						"acc_norm,none": 0.145,
						"acc_norm_stderr,none": 0.011139977517890146,
						"acc_stderr,none": 0.011139977517890146,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.178,
						"acc_norm,none": 0.178,
						"acc_norm_stderr,none": 0.012102167676183596,
						"acc_stderr,none": 0.012102167676183596,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.094,
						"acc_norm,none": 0.094,
						"acc_norm_stderr,none": 0.00923305200078773,
						"acc_stderr,none": 0.00923305200078773,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5354089015566762,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04729088513119564,
						"alias": "kobest",
						"f1,none": 0.45202421669965,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5462962962962963,
						"acc_stderr,none": 0.013291422240187908,
						"alias": " - kobest_boolq",
						"f1,none": 0.4461579619112377,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.01527525231651936,
						"alias": " - kobest_copa",
						"f1,none": 0.6290726817042607,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.44,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.02216263442665284,
						"acc_stderr,none": 0.02222133153414306,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4347868820691948,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5314861460957179,
						"acc_stderr,none": 0.025076077305681312,
						"alias": " - kobest_sentineg",
						"f1,none": 0.40152694028399144,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4873015873015873,
						"acc_stderr,none": 0.014086951987375836,
						"alias": " - kobest_wic",
						"f1,none": 0.340797107787399,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7141470987774112,
						"acc_stderr,none": 0.015852858101128752,
						"alias": "lambada",
						"perplexity,none": 3.8043475336263994,
						"perplexity_stderr,none": 0.2281685057981299
					},
					"lambada_cloze": {
						"acc,none": 0.07374345041723268,
						"acc_stderr,none": 0.004172041356985056,
						"alias": "lambada_cloze",
						"perplexity,none": 374.82042882342057,
						"perplexity_stderr,none": 119.54818849469166
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"lambada_openai": {
						"acc,none": 0.7424801086745585,
						"acc_stderr,none": 0.006091999719129262,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3738584556634206,
						"perplexity_stderr,none": 0.06613091584551405
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.06966815447312245,
						"acc_stderr,none": 0.00354689367215175,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 137.577245835032,
						"perplexity_stderr,none": 3.838590854048455
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42751795070832527,
						"acc_stderr,none": 0.0068923954478686475,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40995706565969,
						"perplexity_stderr,none": 1.9198882405259308
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284605,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.376276908213185,
						"perplexity_stderr,none": 0.06624295795502655
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4486706772753736,
						"acc_stderr,none": 0.006929173919665489,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.08178023365845,
						"perplexity_stderr,none": 1.438828440779044
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.477136806072853,
						"perplexity_stderr,none": 0.8029953639024064
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5196972637298661,
						"acc_stderr,none": 0.006960570207731863,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.65514219581279,
						"perplexity_stderr,none": 1.1521232467165174
					},
					"lambada_standard": {
						"acc,none": 0.6850378420337667,
						"acc_stderr,none": 0.00647140444630582,
						"alias": " - lambada_standard",
						"perplexity,none": 4.232314220607136,
						"perplexity_stderr,none": 0.09029581976656109
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.07781874636134291,
						"acc_stderr,none": 0.0037321778637123674,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 612.0636118118091,
						"perplexity_stderr,none": 20.59008680813543
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2958015267175573,
						"exact_match_stderr,get-answer": 0.011514886263918656
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.017690542680190782,
						"acc_stderr,none": 0.016887410894296937,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24936386768447838,
						"acc_norm,none": 0.27353689567430023,
						"acc_norm_stderr,none": 0.011246739746251145,
						"acc_stderr,none": 0.01091549419314277,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.2733668341708543,
						"acc_norm_stderr,none": 0.008158890612550694,
						"acc_stderr,none": 0.008029434758777933,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5228765092141495,
						"acc_stderr,none": 0.0051405071358398054,
						"alias": "mc_taco",
						"f1,none": 0.5289135208616543,
						"f1_stderr,none": 0.006200052098530846
					},
					"medmcqa": {
						"acc,none": 0.3275161367439637,
						"acc_norm,none": 0.3275161367439637,
						"acc_norm_stderr,none": 0.007257136149169804,
						"acc_stderr,none": 0.007257136149169804,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3087195600942655,
						"acc_norm,none": 0.3087195600942655,
						"acc_norm_stderr,none": 0.012952859416638277,
						"acc_stderr,none": 0.012952859416638277,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3321464178891896,
						"acc_stderr,none": 0.06091173753049962,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.04094376269996794,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.36981132075471695,
						"acc_stderr,none": 0.029711421880107922,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.03295304696818318,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3702127659574468,
						"acc_stderr,none": 0.03156564682236784,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481425,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309994,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643895,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.04134913018303316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38387096774193546,
						"acc_stderr,none": 0.027666182075539635,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2955665024630542,
						"acc_stderr,none": 0.032104944337514575,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4727272727272727,
						"acc_stderr,none": 0.03898531605579419,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.37373737373737376,
						"acc_stderr,none": 0.034468977386593325,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.46113989637305697,
						"acc_stderr,none": 0.03597524411734578,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3153846153846154,
						"acc_stderr,none": 0.02355964698318995,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31092436974789917,
						"acc_stderr,none": 0.030066761582977924,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.034791855725996586,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3743119266055046,
						"acc_stderr,none": 0.020748959408988316,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258526,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46568627450980393,
						"acc_stderr,none": 0.03501038327635897,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4388185654008439,
						"acc_stderr,none": 0.032302649315470375,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.34710743801652894,
						"acc_stderr,none": 0.043457245702925355,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497751,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.03731133519673892,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458934,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.405982905982906,
						"acc_stderr,none": 0.03217180182641086,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252606,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.44316730523627074,
						"acc_stderr,none": 0.017764085035348404,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.024946792225272307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.02718449890994161,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3890675241157556,
						"acc_stderr,none": 0.027690337536485376,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.026822801759507898,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880585,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2966101694915254,
						"acc_stderr,none": 0.011665946586082852,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.029097209568411952,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.33169934640522875,
						"acc_stderr,none": 0.019047485239360378,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3183673469387755,
						"acc_stderr,none": 0.029822533793982062,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.3539161520961976,
						"acc_stderr,none": 0.048395331555541835,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4228855721393035,
						"acc_stderr,none": 0.034932317774212816,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28322232794164287,
						"acc_stderr,none": 0.05862955260080505,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.03711725190740749,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.38596491228070173,
						"acc_stderr,none": 0.03733756969066164,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.38023433520122263,
						"acc_stderr,none": 0.004900229212533644,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3686940602115541,
						"acc_stderr,none": 0.00486579894854048,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7475490196078431,
						"acc_stderr,none": 0.02153332842706632,
						"alias": "mrpc",
						"f1,none": 0.8398133748055988,
						"f1_stderr,none": 0.015562063007134155
					},
					"multimedqa": {
						"acc,none": 0.3567068843151171,
						"acc_norm,none": 0.32404988846927757,
						"acc_norm_stderr,none": 0.00010850708035438849,
						"acc_stderr,none": 0.09804760905447438,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.49896864686468645,
						"acc_stderr,none": 0.0071817878275523515,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7002445464075016,
						"mrr_stderr,none": 0.010317326523278112,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42325056433408575,
						"r@2_stderr,none": 0.016608129658774624
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6578254342819175,
						"mrr_stderr,none": 0.010477833998742761,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403396
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.011634349030470914,
						"exact_match_stderr,remove_whitespace": 0.0017849926209927887
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.022033677993740862,
						"acc_stderr,none": 0.020553269174209198,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.01096173251771343,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.010828024891988879,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3695,
						"acc_stderr,none": 0.010795515113846478,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957061,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5215,
						"acc_stderr,none": 0.011172792428275121,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4825,
						"acc_stderr,none": 0.011176284251254187,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5005,
						"acc_stderr,none": 0.01118313042949518,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4555,
						"acc_stderr,none": 0.05501642166188383,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.009779850767847244,
						"acc_stderr,none": 0.009812682950815192,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24359521776259607,
						"acc_norm,none": 0.27940862510674636,
						"acc_norm_stderr,none": 0.0032782161477599913,
						"acc_stderr,none": 0.0031360621671939616,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01987435483128749,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.743687308145615,
						"acc_norm,none": 0.6277400828170847,
						"acc_norm_stderr,none": 0.010306063670327702,
						"acc_stderr,none": 0.14382358537776974,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3738584556634206,
						"perplexity_stderr,none": 0.06613091584551405,
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.04969392170788078,
						"acc_stderr,none": 0.03996448391177798,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.04560517440787951,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.038123743406448925,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.028908177688046176,
						"acc_stderr,none": 0.028857363751758302,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49679663188724144,
						"acc_stderr,none": 0.006765271702920654,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6767746722730645,
						"acc_stderr,none": 0.0023260992496098544,
						"alias": "qqp",
						"f1,none": 0.6871289025090979,
						"f1_stderr,none": 0.0025998039997248736
					},
					"race": {
						"acc,none": 0.36076555023923446,
						"acc_stderr,none": 0.014862517074604975,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2523,
						"em_stderr,none": 0.004343542061010362,
						"f1,none": 0.26155857166051866,
						"f1_stderr,none": 0.004358518434111173
					},
					"rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.029621832222417196,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323499,
						"acc_stderr,none": 0.00655881224140613,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.029621832222417196,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9094036697247706,
						"acc_stderr,none": 0.009725783032052356,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5680295911226632,
						"acc_norm,none": 0.7658702389283215,
						"acc_norm_stderr,none": 0.0029938954474274457,
						"acc_stderr,none": 0.003502218204723479,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6461681807593758,
						"acc_stderr,none": 0.07201369718907223,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5372596153846154,
						"acc_stderr,none": 0.0049903414782819875,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.823553258335867,
						"acc_stderr,none": 0.0038377979875830292,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5811764705882353,
						"acc_stderr,none": 0.004885294527471591,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"triviaqa": {
						"alias": "triviaqa",
						"exact_match,remove_whitespace": 0.020006687472135534,
						"exact_match_stderr,remove_whitespace": 0.0010453256844523634
					},
					"truthfulqa": {
						"acc,none": 0.3302969599916217,
						"acc_stderr,none": 0.001652651966553619,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3488372093023256,
						"bleu_acc_stderr,none": 0.016684419859986886,
						"bleu_diff,none": -4.7676025757365466,
						"bleu_diff_stderr,none": 0.9399419703452422,
						"bleu_max,none": 28.583595303310176,
						"bleu_max_stderr,none": 0.8227729682715753,
						"rouge1_acc,none": 0.3157894736842105,
						"rouge1_acc_stderr,none": 0.01627228795791692,
						"rouge1_diff,none": -5.9031435220229875,
						"rouge1_diff_stderr,none": 1.0814414904516871,
						"rouge1_max,none": 53.89250627842206,
						"rouge1_max_stderr,none": 0.8743965162035956,
						"rouge2_acc,none": 0.29008567931456547,
						"rouge2_acc_stderr,none": 0.01588623687420952,
						"rouge2_diff,none": -7.549674880493665,
						"rouge2_diff_stderr,none": 1.2612143030210712,
						"rouge2_max,none": 38.248938544143805,
						"rouge2_max_stderr,none": 1.0558298668477941,
						"rougeL_acc,none": 0.32802937576499386,
						"rougeL_acc_stderr,none": 0.016435632932815032,
						"rougeL_diff,none": -5.8769273011964565,
						"rougeL_diff_stderr,none": 1.098816821965796,
						"rougeL_max,none": 51.29782509190946,
						"rougeL_max_stderr,none": 0.8984521547089732
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25458996328029376,
						"acc_stderr,none": 0.015250117079156507,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.406040041544547,
						"acc_stderr,none": 0.014335281713396954,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"wic": {
						"acc,none": 0.5799373040752351,
						"acc_stderr,none": 0.01955590253723442,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6740331491712708,
						"acc_stderr,none": 0.013173782636922184,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04926646390821466,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8168498168498168,
						"acc_stderr,none": 0.023452564261704997,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07056280911012106,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231333,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566058,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503233,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.02018670369357085,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.02047511809298897,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43991967871485943,
						"acc_stderr,none": 0.04533624542720319,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.00946863466929353,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46586345381526106,
						"acc_stderr,none": 0.009998688066102651,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409711,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39879518072289155,
						"acc_stderr,none": 0.009814625416137573,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5373493975903615,
						"acc_stderr,none": 0.009994072620561414,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.01002195648306809,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.010019537972975081,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43654618473895584,
						"acc_stderr,none": 0.009941039791133126,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4923694779116466,
						"acc_stderr,none": 0.010020905731542313,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.009794163014906754,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505931,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4610441767068273,
						"acc_stderr,none": 0.009991608448389061,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099368,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044873,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.009663601903728022,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6331748992238734,
						"acc_stderr,none": 0.053129590825981766,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551173,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7802779616148247,
						"acc_stderr,none": 0.01065547970935364,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499845,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352966,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133263,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.012162974996136387,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828172,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.0118849720733138,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332792,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907709,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6479152878888154,
						"acc_stderr,none": 0.01229119826167458,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8042256686895932,
						"acc_stderr,none": 0.03648190721834694,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8602150537634409,
						"acc_stderr,none": 0.007193092732936861,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.0500664280504192,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7518248175182481,
						"acc_stderr,none": 0.013955800392484946,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7566539923954373,
						"acc_stderr,none": 0.02651002461891978,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.653968253968254,
						"acc_stderr,none": 0.026845499021972877,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7797619047619048,
						"acc_stderr,none": 0.01847750104905629,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"trust_remote_code=True": {
				"confObj": {
					"trust_remote_code": "True"
				},
				"confStr": "trust_remote_code=True",
				"groups": {
					"lambada_multilingual": {
						"acc,none": 0.5373180671453522,
						"acc_stderr,none": 0.084854928421691,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.99725893141735,
						"perplexity_stderr,none": 8.213261901364534
					},
					"pawsx": {
						"acc,none": 0.4555,
						"acc_stderr,none": 0.05413647422159046,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.6216363636363637,
						"acc_stderr,none": 0.07089543455105124,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43981258366800535,
						"acc_stderr,none": 0.04822691235412853,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6327537452620179,
						"acc_stderr,none": 0.06037673446050065,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8044504383007417,
						"acc_stderr,none": 0.036551420135079914,
						"alias": "xwinograd"
					}
				},
				"results": {
					"lambada_multilingual": {
						"acc,none": 0.5373180671453522,
						"acc_stderr,none": 0.084854928421691,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.99725893141735,
						"perplexity_stderr,none": 8.213261901364534
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42732388899670093,
						"acc_stderr,none": 0.00689199878844782,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40417838764442,
						"perplexity_stderr,none": 1.9194150131315955
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284606,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3752600927697225,
						"perplexity_stderr,none": 0.06620127895333149
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.44905880069862214,
						"acc_stderr,none": 0.006929729843881883,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.07358750583119,
						"perplexity_stderr,none": 1.4383741470995577
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.47843119038625,
						"perplexity_stderr,none": 0.8030140849186048
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5193091403066175,
						"acc_stderr,none": 0.0069607812884263836,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.65483748045518,
						"perplexity_stderr,none": 1.152131238974256
					},
					"paws_de": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.010957190790298967,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3765,
						"acc_stderr,none": 0.010836631916589663,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3725,
						"acc_stderr,none": 0.01081343332018479,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957061,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.519,
						"acc_stderr,none": 0.011175058879956061,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.01117776123260332,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4975,
						"acc_stderr,none": 0.011182996230990788,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4555,
						"acc_stderr,none": 0.05413647422159046,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.6216363636363637,
						"acc_stderr,none": 0.07089543455105124,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.021983962090086333,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.019966103540279462,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503237,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.02018670369357085,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988947,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43981258366800535,
						"acc_stderr,none": 0.04822691235412853,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.00946863466929354,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4670682730923695,
						"acc_stderr,none": 0.010000311392557843,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.010015524156629818,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.0098079150706773,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5373493975903615,
						"acc_stderr,none": 0.009994072620561418,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.010022072867228943,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624494,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4357429718875502,
						"acc_stderr,none": 0.009938966706641343,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.01001916285762449,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3923694779116466,
						"acc_stderr,none": 0.009787120838990105,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115701,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.45943775100401607,
						"acc_stderr,none": 0.009989039874786899,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41325301204819276,
						"acc_stderr,none": 0.009870087435623787,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044863,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.37269076305220883,
						"acc_stderr,none": 0.009691761259693463,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6327537452620179,
						"acc_stderr,none": 0.06037673446050065,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907709,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7796161482461945,
						"acc_stderr,none": 0.01066698842905873,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7094639311714097,
						"acc_stderr,none": 0.011683600935499847,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5645268034414295,
						"acc_stderr,none": 0.012759525506489235,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247465,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6631369953673064,
						"acc_stderr,none": 0.01216297499613639,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828155,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6929185969556585,
						"acc_stderr,none": 0.011870783739438435,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.01277651858633279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5962938451356717,
						"acc_stderr,none": 0.012626249735246583,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6485771012574454,
						"acc_stderr,none": 0.01228591087173833,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8044504383007417,
						"acc_stderr,none": 0.036551420135079914,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8597849462365591,
						"acc_stderr,none": 0.0072023492671659355,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.05006642805041919,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7539103232533889,
						"acc_stderr,none": 0.013916300191059485,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7604562737642585,
						"acc_stderr,none": 0.026368102510190856,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6507936507936508,
						"acc_stderr,none": 0.026902825537698707,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7797619047619048,
						"acc_stderr,none": 0.018477501049056298,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/v5-Eagle-7B-HF"
	},
	"RWKV/v5-EagleX-v2-7B-HF": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.6183765501691093,
						"acc_norm_stderr,none": 0.07620513869563776,
						"acc_stderr,none": 0.104226283875455,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5096875,
						"acc_stderr,none": 0.05249191363131572,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.17105,
						"acc_stderr,none": 0.23351488898631376,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8272835820895522,
						"acc_stderr,none": 0.1567067313802052,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25780089153046054,
						"acc_norm,none": 0.25780089153046054,
						"acc_norm_stderr,none": 0.11450610305641491,
						"acc_stderr,none": 0.11450610305641491,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.28742876877914014,
						"acc_norm,none": 0.28742876877914014,
						"acc_norm_stderr,none": 0.04861573319518032,
						"acc_stderr,none": 0.04861573319518032,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7119577370304113,
						"likelihood_diff_stderr,none": 0.5175442864086851,
						"pct_stereotype,none": 0.6122540250447227,
						"pct_stereotype_stderr,none": 0.07088079574874108
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"glue": {
						"acc,none": 0.7439256788947117,
						"acc_stderr,none": 0.004486672404747918,
						"alias": "glue",
						"f1,none": 0.7260010895764197,
						"f1_stderr,none": 7.71937501333283e-05,
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"kmmlu": {
						"acc,none": 0.26393300606410625,
						"acc_norm,none": 0.26393300606410625,
						"acc_norm_stderr,none": 0.025680395024354815,
						"acc_stderr,none": 0.025680395024354815,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5755316816487612,
						"acc_norm,none": 0.54,
						"acc_norm_stderr,none": 0.0004977955911823678,
						"acc_stderr,none": 0.06205627133843412,
						"alias": "kobest",
						"f1,none": 0.5530043869539744,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7265670483213662,
						"acc_stderr,none": 0.014492379122331596,
						"alias": "lambada",
						"perplexity,none": 3.385994977942082,
						"perplexity_stderr,none": 0.1478991564303529
					},
					"lambada_cloze": {
						"acc,none": 0.07723656122647002,
						"acc_stderr,none": 0.009489042126827388,
						"alias": "lambada_cloze",
						"perplexity,none": 173.8268707381544,
						"perplexity_stderr,none": 5.6239198695865635
					},
					"lambada_multilingual": {
						"acc,none": 0.5541238113720163,
						"acc_stderr,none": 0.08228397169239214,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.83756081520726,
						"perplexity_stderr,none": 7.364939712028874
					},
					"mmlu": {
						"acc,none": 0.4383990884489389,
						"acc_stderr,none": 0.10033766244215783,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.40488841657810837,
						"acc_stderr,none": 0.11008578713788937,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4962986803990988,
						"acc_stderr,none": 0.08634005899574593,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49821254468638276,
						"acc_stderr,none": 0.08020096682742794,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.37297811607992387,
						"acc_stderr,none": 0.08288923614920533,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4107877927608233,
						"acc_norm,none": 0.3841349722778492,
						"acc_norm_stderr,none": 0.00010642961870287373,
						"acc_stderr,none": 0.05381311790660645,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4702857142857143,
						"acc_stderr,none": 0.051808977072434335,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7524294879202531,
						"acc_norm,none": 0.622951179453004,
						"acc_norm_stderr,none": 0.007880964636376735,
						"acc_stderr,none": 0.1504210776127774,
						"alias": "pythia",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1231306730085664,
						"perplexity_stderr,none": 0.060657249606188,
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3723404255319149,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.054361997567311325,
						"acc_stderr,none": 0.04760751849429055,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.669095870353732,
						"acc_stderr,none": 0.07259776552887541,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.34013043916503455,
						"acc_stderr,none": 0.0015214412084548238,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"xcopa": {
						"acc,none": 0.6225454545454545,
						"acc_stderr,none": 0.07069309770879728,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.04704126251631503,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.054330125954133716,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03811008839661942,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6358511837655016,
						"acc_norm,none": 0.6183765501691093,
						"acc_norm_stderr,none": 0.07620513869563776,
						"acc_stderr,none": 0.104226283875455,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5096875,
						"acc_stderr,none": 0.05249191363131572,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968526,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.014404353664908238,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41552901023890787,
						"acc_norm,none": 0.45819112627986347,
						"acc_norm_stderr,none": 0.0145602203087147,
						"acc_stderr,none": 0.014401366641216386,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7445286195286195,
						"acc_norm,none": 0.6973905723905723,
						"acc_norm_stderr,none": 0.009426434542371227,
						"acc_stderr,none": 0.00894911355166556,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.17105,
						"acc_stderr,none": 0.23351488898631376,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.095,
						"acc_stderr,none": 0.00655812507522166,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.3605,
						"acc_stderr,none": 0.010739066010104792,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.057,
						"acc_stderr,none": 0.0051854550882478225,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9275,
						"acc_stderr,none": 0.0057998874426297645,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342205,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.16,
						"acc_stderr,none": 0.008199610771762489,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154647114,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.06,
						"acc_stderr,none": 0.005311695308799959,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.041,
						"acc_stderr,none": 0.004435012363831025,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0017353579175704988,
						"acc_stderr,none": 0.0008671138796248289,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8272835820895522,
						"acc_stderr,none": 0.1567067313802052,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403633,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469323,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340983,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406725,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557844,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660013,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295437,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491113,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036216,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163037,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029993,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426126,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140927,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178327,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496501,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928364,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905684,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792926,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.01031821038094609,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106724,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.015605111967541949,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996685,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812197,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066536,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612042,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832022,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259206,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220062,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792935,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784362,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509008,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.0156850572527172,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523722,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737227,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000102,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602494,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910642,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524293,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.737,
						"acc_stderr,none": 0.01392928659425974,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096921,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427418,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478325,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469308,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.015473313265859406,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298689009,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318225,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444235,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.01177211037081219,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.00927691010310331,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.278,
						"acc_stderr,none": 0.014174516461485251,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719121,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400238,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855743,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081361,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.015459721957493379,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.015499685165842597,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6697247706422018,
						"acc_stderr,none": 0.00822581091427727,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8928571428571429,
						"acc_stderr,none": 0.04170530058008159,
						"alias": "cb",
						"f1,none": 0.724616858237548,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25780089153046054,
						"acc_norm,none": 0.25780089153046054,
						"acc_norm_stderr,none": 0.11450610305641491,
						"acc_stderr,none": 0.11450610305641491,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915606,
						"acc_stderr,none": 0.06742861107915606,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.32608695652173914,
						"acc_norm,none": 0.32608695652173914,
						"acc_norm_stderr,none": 0.06988152725357213,
						"acc_stderr,none": 0.06988152725357213,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.28742876877914014,
						"acc_norm,none": 0.28742876877914014,
						"acc_norm_stderr,none": 0.04861573319518032,
						"acc_stderr,none": 0.04861573319518032,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03782614981812041,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.32057416267942584,
						"acc_norm,none": 0.32057416267942584,
						"acc_norm_stderr,none": 0.03235963541722358,
						"acc_stderr,none": 0.03235963541722358,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.03515674134876764,
						"acc_stderr,none": 0.03515674134876764,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316697,
						"acc_stderr,none": 0.04485760883316697,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2848297213622291,
						"acc_norm,none": 0.2848297213622291,
						"acc_norm_stderr,none": 0.025151821686179503,
						"acc_stderr,none": 0.025151821686179503,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923382,
						"acc_stderr,none": 0.030964517926923382,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3240223463687151,
						"acc_norm,none": 0.3240223463687151,
						"acc_norm_stderr,none": 0.03507871288800094,
						"acc_stderr,none": 0.03507871288800094,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809826,
						"acc_stderr,none": 0.039578354719809826,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564394,
						"acc_stderr,none": 0.026350722655564394,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460388,
						"acc_stderr,none": 0.03256685484460388,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.0340105262010409,
						"acc_stderr,none": 0.0340105262010409,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.03713396279871006,
						"acc_stderr,none": 0.03713396279871006,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.03680350371286462,
						"acc_stderr,none": 0.03680350371286462,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.034300856070148815,
						"acc_stderr,none": 0.034300856070148815,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.028631924753360995,
						"acc_stderr,none": 0.028631924753360995,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.031911782267135466,
						"acc_stderr,none": 0.031911782267135466,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.41596638655462187,
						"acc_norm,none": 0.41596638655462187,
						"acc_norm_stderr,none": 0.03201650100739615,
						"acc_stderr,none": 0.03201650100739615,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2784090909090909,
						"acc_norm,none": 0.2784090909090909,
						"acc_norm_stderr,none": 0.03388193526335356,
						"acc_stderr,none": 0.03388193526335356,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.03749763364527049,
						"acc_stderr,none": 0.03749763364527049,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.04289122333662572,
						"acc_stderr,none": 0.04289122333662572,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.040061680838488774,
						"acc_stderr,none": 0.040061680838488774,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3023255813953488,
						"acc_norm,none": 0.3023255813953488,
						"acc_norm_stderr,none": 0.0351209126342837,
						"acc_stderr,none": 0.0351209126342837,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.37850467289719625,
						"acc_norm,none": 0.37850467289719625,
						"acc_norm_stderr,none": 0.033232633255714746,
						"acc_stderr,none": 0.033232633255714746,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.0404901546062249,
						"acc_stderr,none": 0.0404901546062249,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.319047619047619,
						"acc_norm,none": 0.319047619047619,
						"acc_norm_stderr,none": 0.03224133248962465,
						"acc_stderr,none": 0.03224133248962465,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.034251778896020865,
						"acc_stderr,none": 0.034251778896020865,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.04126514736324099,
						"acc_stderr,none": 0.04126514736324099,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.038061426873099935,
						"acc_stderr,none": 0.038061426873099935,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.04429811949614585,
						"acc_stderr,none": 0.04429811949614585,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767256,
						"acc_stderr,none": 0.030113040167767256,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.02251703243459229,
						"acc_stderr,none": 0.02251703243459229,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.33189655172413796,
						"acc_norm,none": 0.33189655172413796,
						"acc_norm_stderr,none": 0.03098255553570088,
						"acc_stderr,none": 0.03098255553570088,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3045977011494253,
						"acc_norm,none": 0.3045977011494253,
						"acc_norm_stderr,none": 0.03499115838809175,
						"acc_stderr,none": 0.03499115838809175,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3141592920353982,
						"acc_norm,none": 0.3141592920353982,
						"acc_norm_stderr,none": 0.030945344741493037,
						"acc_stderr,none": 0.030945344741493037,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.036085410115739666,
						"acc_stderr,none": 0.036085410115739666,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548533,
						"acc_stderr,none": 0.03433919627548533,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.03523168397737091,
						"acc_stderr,none": 0.03523168397737091,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.036342189215581536,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7119577370304113,
						"likelihood_diff_stderr,none": 0.5175442864086851,
						"pct_stereotype,none": 0.6122540250447227,
						"pct_stereotype_stderr,none": 0.07088079574874108
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.9825581395348837,
						"likelihood_diff_stderr,none": 0.09356936960341339,
						"pct_stereotype,none": 0.6446034585569469,
						"pct_stereotype_stderr,none": 0.011691383517451224
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.123626373626373,
						"likelihood_diff_stderr,none": 0.3956344425402566,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.04911704114831279
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.431818181818182,
						"likelihood_diff_stderr,none": 1.6086029049411554,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.084615384615384,
						"likelihood_diff_stderr,none": 0.6100933124138233,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.05266563052934292
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.80390625,
						"likelihood_diff_stderr,none": 0.16943962815706046,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.027429019252949587
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.931134259259259,
						"likelihood_diff_stderr,none": 0.26690492211358713,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.033448873829978666
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.501736111111111,
						"likelihood_diff_stderr,none": 0.3848829181935175,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.864419291338583,
						"likelihood_diff_stderr,none": 0.16830750773083006,
						"pct_stereotype,none": 0.562992125984252,
						"pct_stereotype_stderr,none": 0.022028849296085076
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.13963963963964,
						"likelihood_diff_stderr,none": 0.3855024626336493,
						"pct_stereotype,none": 0.7477477477477478,
						"pct_stereotype_stderr,none": 0.04140938118194943
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.365591397849462,
						"likelihood_diff_stderr,none": 0.5114015203492834,
						"pct_stereotype,none": 0.8924731182795699,
						"pct_stereotype_stderr,none": 0.03229700003364003
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.440789473684211,
						"likelihood_diff_stderr,none": 0.24471702249046992,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.440257901013715,
						"likelihood_diff_stderr,none": 0.08072683504058403,
						"pct_stereotype,none": 0.5796064400715564,
						"pct_stereotype_stderr,none": 0.01205750973418372
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3944444444444444,
						"likelihood_diff_stderr,none": 0.31160396577774363,
						"pct_stereotype,none": 0.6222222222222222,
						"pct_stereotype_stderr,none": 0.051392052067171366
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.3365384615384617,
						"likelihood_diff_stderr,none": 1.0189888440507104,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.208333333333333,
						"likelihood_diff_stderr,none": 0.5205168035834604,
						"pct_stereotype,none": 0.7424242424242424,
						"pct_stereotype_stderr,none": 0.054240275510565296
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.935747663551402,
						"likelihood_diff_stderr,none": 0.1482677731672535,
						"pct_stereotype,none": 0.6105919003115264,
						"pct_stereotype_stderr,none": 0.027258566978193188
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.70899209486166,
						"likelihood_diff_stderr,none": 0.20854601107412696,
						"pct_stereotype,none": 0.4189723320158103,
						"pct_stereotype_stderr,none": 0.03108070121761647
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4878472222222223,
						"likelihood_diff_stderr,none": 0.3977526340910126,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.279891304347826,
						"likelihood_diff_stderr,none": 0.16409318203589687,
						"pct_stereotype,none": 0.48478260869565215,
						"pct_stereotype_stderr,none": 0.023327190181139233
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.267391304347826,
						"likelihood_diff_stderr,none": 0.2746907584324881,
						"pct_stereotype,none": 0.6869565217391305,
						"pct_stereotype_stderr,none": 0.043432470166108225
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.1565934065934065,
						"likelihood_diff_stderr,none": 0.3153493022224966,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.04284305206509431
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.970025510204082,
						"likelihood_diff_stderr,none": 0.25378297036218095,
						"pct_stereotype,none": 0.6989795918367347,
						"pct_stereotype_stderr,none": 0.03284830105527338
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"glue": {
						"acc,none": 0.7439256788947117,
						"acc_stderr,none": 0.004486672404747918,
						"alias": "glue",
						"f1,none": 0.7260010895764197,
						"f1_stderr,none": 7.71937501333283e-05,
						"mcc,none": 0.2004665069808333,
						"mcc_stderr,none": 0.03232600167796314
					},
					"hellaswag": {
						"acc,none": 0.5599482174865564,
						"acc_norm,none": 0.7492531368253336,
						"acc_norm_stderr,none": 0.0043255721037532896,
						"acc_stderr,none": 0.004953787146510935,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.26393300606410625,
						"acc_norm,none": 0.26393300606410625,
						"acc_norm_stderr,none": 0.025680395024354815,
						"acc_stderr,none": 0.025680395024354815,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809958,
						"acc_stderr,none": 0.013963164754809958,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291359,
						"acc_stderr,none": 0.014236526215291359,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.01360735683959812,
						"acc_stderr,none": 0.01360735683959812,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905689,
						"acc_stderr,none": 0.014356395999905689,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.017613084291727022,
						"acc_stderr,none": 0.017613084291727022,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702306,
						"acc_stderr,none": 0.013681600278702306,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.334,
						"acc_norm,none": 0.334,
						"acc_norm_stderr,none": 0.014922019523732961,
						"acc_stderr,none": 0.014922019523732961,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895026,
						"acc_stderr,none": 0.013825416526895026,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296172,
						"acc_stderr,none": 0.014341711358296172,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2076923076923077,
						"acc_norm,none": 0.2076923076923077,
						"acc_norm_stderr,none": 0.03571595663393522,
						"acc_stderr,none": 0.03571595663393522,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314128,
						"acc_stderr,none": 0.013644675781314128,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.01429714686251791,
						"acc_stderr,none": 0.01429714686251791,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809947,
						"acc_stderr,none": 0.013963164754809947,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177446,
						"acc_stderr,none": 0.013569640199177446,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145143,
						"acc_stderr,none": 0.013979965645145143,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.01379003862087284,
						"acc_stderr,none": 0.01379003862087284,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145155,
						"acc_stderr,none": 0.013979965645145155,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440474,
						"acc_stderr,none": 0.013946271849440474,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145141,
						"acc_stderr,none": 0.013979965645145141,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361428,
						"acc_stderr,none": 0.014498627873361428,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729474,
						"acc_stderr,none": 0.014013292702729474,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881424,
						"acc_stderr,none": 0.013588548437881424,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555951,
						"acc_stderr,none": 0.013550631705555951,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.018444294148717365,
						"acc_stderr,none": 0.018444294148717365,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555958,
						"acc_stderr,none": 0.013550631705555958,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633918,
						"acc_stderr,none": 0.014046255632633918,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872839,
						"acc_stderr,none": 0.013790038620872839,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986184,
						"acc_stderr,none": 0.014062601350986184,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.02469885513168685,
						"acc_stderr,none": 0.02469885513168685,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177445,
						"acc_stderr,none": 0.013569640199177445,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462623,
						"acc_stderr,none": 0.014078856992462623,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.03089738243291862,
						"acc_stderr,none": 0.03089738243291862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.01347358666196723,
						"acc_stderr,none": 0.01347358666196723,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986189,
						"acc_stderr,none": 0.014062601350986189,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.02960162633044061,
						"acc_stderr,none": 0.02960162633044061,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722694,
						"acc_stderr,none": 0.014645596385722694,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5755316816487612,
						"acc_norm,none": 0.54,
						"acc_norm_stderr,none": 0.0004977955911823678,
						"acc_stderr,none": 0.06205627133843412,
						"alias": "kobest",
						"f1,none": 0.5530043869539744,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6082621082621082,
						"acc_stderr,none": 0.013032097988666393,
						"alias": " - kobest_boolq",
						"f1,none": 0.5573871973587674,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264893,
						"alias": " - kobest_copa",
						"f1,none": 0.634121525783406,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.428,
						"acc_norm,none": 0.54,
						"acc_norm_stderr,none": 0.022311333245289663,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42439478845324413,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.7178841309823678,
						"acc_stderr,none": 0.022614830739113155,
						"alias": " - kobest_sentineg",
						"f1,none": 0.7105961989065348,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.5055555555555555,
						"acc_stderr,none": 0.014090627347032371,
						"alias": " - kobest_wic",
						"f1,none": 0.48512362922971874,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7265670483213662,
						"acc_stderr,none": 0.014492379122331596,
						"alias": "lambada",
						"perplexity,none": 3.385994977942082,
						"perplexity_stderr,none": 0.1478991564303529
					},
					"lambada_cloze": {
						"acc,none": 0.07723656122647002,
						"acc_stderr,none": 0.009489042126827388,
						"alias": "lambada_cloze",
						"perplexity,none": 173.8268707381544,
						"perplexity_stderr,none": 5.6239198695865635
					},
					"lambada_multilingual": {
						"acc,none": 0.5541238113720163,
						"acc_stderr,none": 0.08228397169239214,
						"alias": "lambada_multilingual",
						"perplexity,none": 18.83756081520726,
						"perplexity_stderr,none": 7.364939712028874
					},
					"lambada_openai": {
						"acc,none": 0.7500485154279061,
						"acc_stderr,none": 0.006032323323255986,
						"alias": " - lambada_openai",
						"perplexity,none": 3.1231306730085664,
						"perplexity_stderr,none": 0.060657249606188
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.05977100718028333,
						"acc_stderr,none": 0.003302738425977854,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 175.27755973484588,
						"perplexity_stderr,none": 5.787086139411937
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.44323694934989327,
						"acc_stderr,none": 0.006920942710141903,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31.36961678642296,
						"perplexity_stderr,none": 1.7449488724394175
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7527653793906462,
						"acc_stderr,none": 0.006010305315759311,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.122154236418865,
						"perplexity_stderr,none": 0.06069446081445905
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4737046380749078,
						"acc_stderr,none": 0.00695633779153668,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 25.042039621913865,
						"perplexity_stderr,none": 1.2279515491112283
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5666601979429459,
						"acc_stderr,none": 0.0069037923068605445,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 14.869726239634026,
						"perplexity_stderr,none": 0.7195204040047735
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5342518921016883,
						"acc_stderr,none": 0.006949613576318102,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 19.784267191646578,
						"perplexity_stderr,none": 1.0457785188460769
					},
					"lambada_standard": {
						"acc,none": 0.7003687172520862,
						"acc_stderr,none": 0.006382179569794072,
						"alias": " - lambada_standard",
						"perplexity,none": 3.6502116742417114,
						"perplexity_stderr,none": 0.07182051048711009
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0947021152726567,
						"acc_stderr,none": 0.0040793189739294095,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 172.37618174146291,
						"perplexity_stderr,none": 5.359129496279082
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.34478371501272265,
						"exact_match_stderr,get-answer": 0.011991613472848755
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.30261136712749614,
						"acc_norm_stderr,none": 0.018018696598158843,
						"acc_stderr,none": 0.016887410894296965,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.26208651399491095,
						"acc_norm,none": 0.28880407124681934,
						"acc_norm_stderr,none": 0.011434263441269486,
						"acc_stderr,none": 0.011095246835491722,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25896147403685094,
						"acc_norm,none": 0.26164154103852594,
						"acc_norm_stderr,none": 0.008046139671905343,
						"acc_stderr,none": 0.008019338828219928,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3419826307985596,
						"acc_stderr,none": 0.004882156585093113,
						"alias": "mc_taco",
						"f1,none": 0.5049007889074827,
						"f1_stderr,none": 0.0054687066918815905
					},
					"medmcqa": {
						"acc,none": 0.3822615347836481,
						"acc_norm,none": 0.3822615347836481,
						"acc_norm_stderr,none": 0.007514335952905339,
						"acc_stderr,none": 0.007514335952905339,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3904163393558523,
						"acc_norm,none": 0.3904163393558523,
						"acc_norm_stderr,none": 0.01367845656474356,
						"acc_stderr,none": 0.01367845656474356,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4383990884489389,
						"acc_stderr,none": 0.10033766244215783,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4740740740740741,
						"acc_stderr,none": 0.04313531696750575,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.47547169811320755,
						"acc_stderr,none": 0.030735822206205608,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.5069444444444444,
						"acc_stderr,none": 0.04180806750294938,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720683,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.03758517775404947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.044405219061793275,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.050251890762960605,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3872340425531915,
						"acc_stderr,none": 0.03184389265339525,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.30701754385964913,
						"acc_stderr,none": 0.043391383225798594,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.496551724137931,
						"acc_stderr,none": 0.041665675771015785,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.023919984164047746,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.0404061017820884,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4935483870967742,
						"acc_stderr,none": 0.028441638233540515,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.35960591133004927,
						"acc_stderr,none": 0.033764582465095665,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0498887651569859,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5636363636363636,
						"acc_stderr,none": 0.03872592983524754,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.48484848484848486,
						"acc_stderr,none": 0.03560716516531061,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.616580310880829,
						"acc_stderr,none": 0.03508984236295343,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.41794871794871796,
						"acc_stderr,none": 0.025007329882461217,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.027940457136228405,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3865546218487395,
						"acc_stderr,none": 0.03163145807552379,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2980132450331126,
						"acc_stderr,none": 0.037345356767871984,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5577981651376147,
						"acc_stderr,none": 0.02129361320752021,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.030225226160012407,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5735294117647058,
						"acc_stderr,none": 0.03471157907953427,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.03068582059661081,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4618834080717489,
						"acc_stderr,none": 0.03346015011973228,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5572519083969466,
						"acc_stderr,none": 0.04356447202665069,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.40488841657810837,
						"acc_stderr,none": 0.11008578713788937,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.48760330578512395,
						"acc_stderr,none": 0.04562951548180765,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5462962962962963,
						"acc_stderr,none": 0.04812917324536823,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4171779141104294,
						"acc_stderr,none": 0.038741028598180814,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.04157751539865629,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5436893203883495,
						"acc_stderr,none": 0.04931801994220416,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6965811965811965,
						"acc_stderr,none": 0.030118210106942645,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6104725415070242,
						"acc_stderr,none": 0.017438082556264597,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4421965317919075,
						"acc_stderr,none": 0.026738603643807403,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21340782122905028,
						"acc_stderr,none": 0.013702859932196089,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.47058823529411764,
						"acc_stderr,none": 0.028580341065138293,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4962986803990988,
						"acc_stderr,none": 0.08634005899574593,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5241157556270096,
						"acc_stderr,none": 0.028365041542564577,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.49074074074074076,
						"acc_stderr,none": 0.027815973433878014,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.02812163604063989,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.35071707953063885,
						"acc_stderr,none": 0.012187773370741522,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4411764705882353,
						"acc_stderr,none": 0.030161911930767102,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.020130388312904524,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4909090909090909,
						"acc_stderr,none": 0.04788339768702861,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4448979591836735,
						"acc_stderr,none": 0.031814251181977865,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49821254468638276,
						"acc_stderr,none": 0.08020096682742794,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7213930348258707,
						"acc_stderr,none": 0.03170056183497309,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.37297811607992387,
						"acc_stderr,none": 0.08288923614920533,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.04725815626252609,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120575,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6491228070175439,
						"acc_stderr,none": 0.03660298834049163,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7883851248089658,
						"acc_stderr,none": 0.0041230564433915855,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7828519121236778,
						"acc_stderr,none": 0.004158330130094683,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6936274509803921,
						"acc_stderr,none": 0.02285024477026493,
						"alias": "mrpc",
						"f1,none": 0.8153618906942393,
						"f1_stderr,none": 0.016216526338492757
					},
					"multimedqa": {
						"acc,none": 0.4107877927608233,
						"acc_norm,none": 0.3841349722778492,
						"acc_norm_stderr,none": 0.00010642961870287373,
						"acc_stderr,none": 0.05381311790660645,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5614686468646864,
						"acc_stderr,none": 0.007127325111557941,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7148231767437528,
						"mrr_stderr,none": 0.010312593062411736,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4040632054176072,
						"r@2_stderr,none": 0.016495030288906053
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6600827705913958,
						"mrr_stderr,none": 0.010433975808709943,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4717832957110609,
						"r@2_stderr,none": 0.01678053141516135
					},
					"openbookqa": {
						"acc,none": 0.304,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.020591649571224932,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3945,
						"acc_stderr,none": 0.010931359582007931,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.010842308463902531,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.404,
						"acc_stderr,none": 0.010975072943404668,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5445,
						"acc_stderr,none": 0.011138757154883975,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5185,
						"acc_stderr,none": 0.011175478542788579,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5235,
						"acc_stderr,none": 0.011170777418517835,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5295,
						"acc_stderr,none": 0.011163654804511657,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4702857142857143,
						"acc_stderr,none": 0.051808977072434335,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7763873775843307,
						"acc_norm,none": 0.7829162132752993,
						"acc_norm_stderr,none": 0.00961870841575678,
						"acc_stderr,none": 0.009721489519176299,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26489111870196413,
						"acc_norm,none": 0.295634073441503,
						"acc_norm_stderr,none": 0.0033338769350872446,
						"acc_stderr,none": 0.003223908333758172,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.02193084412072851,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7524294879202531,
						"acc_norm,none": 0.622951179453004,
						"acc_norm_stderr,none": 0.007880964636376735,
						"acc_stderr,none": 0.1504210776127774,
						"alias": "pythia",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.1231306730085664,
						"perplexity_stderr,none": 0.060657249606188,
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3723404255319149,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.054361997567311325,
						"acc_stderr,none": 0.04760751849429055,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.04552192400253557,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.03885143449429052,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3732394366197183,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064897,
						"acc_stderr,none": 0.028750895488989205,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7547860499628989,
						"acc_stderr,none": 0.002139626752564166,
						"alias": "qqp",
						"f1,none": 0.7252521893359938,
						"f1_stderr,none": 0.0026508622655465794
					},
					"race": {
						"acc,none": 0.3464114832535885,
						"acc_stderr,none": 0.014726451021782801,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.7003610108303249,
						"acc_stderr,none": 0.02757437014529261,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.93,
						"acc_norm,none": 0.931,
						"acc_norm_stderr,none": 0.008018934050315151,
						"acc_stderr,none": 0.008072494358323506,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6931407942238267,
						"acc_stderr,none": 0.02776040303805897,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9048165137614679,
						"acc_stderr,none": 0.009943790947096227,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5907227831650504,
						"acc_norm,none": 0.7778666400079977,
						"acc_norm_stderr,none": 0.0029389364031458787,
						"acc_stderr,none": 0.0034764130175901392,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.669095870353732,
						"acc_stderr,none": 0.07259776552887541,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5823317307692307,
						"acc_stderr,none": 0.004935946380807059,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8587209891557718,
						"acc_stderr,none": 0.003506665223133944,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5705882352941176,
						"acc_stderr,none": 0.00490139360631807,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.34013043916503455,
						"acc_stderr,none": 0.0015214412084548238,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762553,
						"bleu_diff,none": -6.3885216667008216,
						"bleu_diff_stderr,none": 0.8237728208335762,
						"bleu_max,none": 25.848948085807137,
						"bleu_max_stderr,none": 0.7864128636857829,
						"rouge1_acc,none": 0.30966952264381886,
						"rouge1_acc_stderr,none": 0.016185744355144912,
						"rouge1_diff,none": -8.407910889709958,
						"rouge1_diff_stderr,none": 0.9109032325137808,
						"rouge1_max,none": 51.46798847863768,
						"rouge1_max_stderr,none": 0.8544738802487006,
						"rouge2_acc,none": 0.27539779681762544,
						"rouge2_acc_stderr,none": 0.015638135667775523,
						"rouge2_diff,none": -9.952391260329206,
						"rouge2_diff_stderr,none": 1.0960462809375815,
						"rouge2_max,none": 35.498492375426586,
						"rouge2_max_stderr,none": 0.9955292745738608,
						"rougeL_acc,none": 0.3108935128518972,
						"rougeL_acc_stderr,none": 0.016203316673559693,
						"rougeL_diff,none": -8.54103962629304,
						"rougeL_diff_stderr,none": 0.9259882554244762,
						"rougeL_max,none": 48.57435182019648,
						"rougeL_max_stderr,none": 0.868054036707988
					},
					"truthfulqa_mc1": {
						"acc,none": 0.26805385556915545,
						"acc_stderr,none": 0.015506204722834553,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4122070227609136,
						"acc_stderr,none": 0.014268999975578912,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05216535433070866,
						"exact_match_stderr,none": 0.004934037077281569
					},
					"wic": {
						"acc,none": 0.5219435736677116,
						"acc_stderr,none": 0.019791633564310455,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6393202013936226,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5575950468323259,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.694026261310556,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7332280978689818,
						"acc_stderr,none": 0.012430046102144333,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8571428571428571,
						"acc_stderr,none": 0.021217447349500138,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6225454545454545,
						"acc_stderr,none": 0.07069309770879728,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.021728881438701705,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207867,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.02059164957122493,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.0195369235747476,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.021434712356072666,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530085,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.02027150383507522,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.04704126251631503,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4755020080321285,
						"acc_stderr,none": 0.010010036112667854,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4835341365461847,
						"acc_stderr,none": 0.010016636930829975,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.4108433734939759,
						"acc_stderr,none": 0.009861456841490835,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5353413654618474,
						"acc_stderr,none": 0.009997006138567242,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4682730923694779,
						"acc_stderr,none": 0.010001876146466708,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42771084337349397,
						"acc_stderr,none": 0.009916774564942348,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4843373493975904,
						"acc_stderr,none": 0.010017154458106753,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.00987847434182292,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42610441767068274,
						"acc_stderr,none": 0.00991201637745907,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4562248995983936,
						"acc_stderr,none": 0.00998358919769393,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.009837245625453,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41285140562248995,
						"acc_stderr,none": 0.00986866594308441,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568397,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.054330125954133716,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040303,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.771674387822634,
						"acc_stderr,none": 0.010802042577302285,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7233620119126406,
						"acc_stderr,none": 0.011511854288593795,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801903,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703514,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6737260092653872,
						"acc_stderr,none": 0.012065474625979069,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245275,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.686962276637988,
						"acc_stderr,none": 0.011933732786576634,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.01277651858633279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607803,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6499007279947054,
						"acc_stderr,none": 0.012275258369751086,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8154641492470218,
						"acc_stderr,none": 0.03811008839661942,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8761290322580645,
						"acc_stderr,none": 0.00683361864926894,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7372262773722628,
						"acc_stderr,none": 0.01422029531609415,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8136882129277566,
						"acc_stderr,none": 0.024054621770299663,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.02597659935230537,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7797619047619048,
						"acc_stderr,none": 0.018477501049056294,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.38911835920809,
						"acc_stderr,none": 0.07841270364162685,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.37577045696068007,
						"acc_stderr,none": 0.09253127657638455,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.42484711940778885,
						"acc_stderr,none": 0.06195545679428354,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4192395190120247,
						"acc_stderr,none": 0.0627096382761993,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.34443387250237867,
						"acc_stderr,none": 0.06833100082079437,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.43600682593856654,
						"acc_norm,none": 0.4863481228668942,
						"acc_norm_stderr,none": 0.01460594342986095,
						"acc_stderr,none": 0.014491225699230916,
						"alias": "arc_challenge"
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.15314632297194844,
						"exact_match_stderr,get-answer": 0.009919728152791468
					},
					"hellaswag": {
						"acc,none": 0.5538737303326031,
						"acc_norm,none": 0.7485560645289783,
						"acc_norm_stderr,none": 0.004329565016527318,
						"acc_stderr,none": 0.0049607323822552386,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.38911835920809,
						"acc_stderr,none": 0.07841270364162685,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3618421052631579,
						"acc_stderr,none": 0.03910525752849726,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.41509433962264153,
						"acc_stderr,none": 0.03032594578928611,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3930635838150289,
						"acc_stderr,none": 0.03724249595817729,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3404255319148936,
						"acc_stderr,none": 0.030976692998534422,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.04303684033537315,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4206896551724138,
						"acc_stderr,none": 0.0411391498118926,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.31216931216931215,
						"acc_stderr,none": 0.023865206836972585,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.03970158273235173,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45483870967741935,
						"acc_stderr,none": 0.028327743091561074,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.32019704433497537,
						"acc_stderr,none": 0.032826493853041504,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5818181818181818,
						"acc_stderr,none": 0.03851716319398395,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3838383838383838,
						"acc_stderr,none": 0.03464881675016338,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.47150259067357514,
						"acc_stderr,none": 0.03602573571288441,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3564102564102564,
						"acc_stderr,none": 0.0242831405294673,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2851851851851852,
						"acc_stderr,none": 0.027528599210340492,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.030388353551886838,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.03603038545360384,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.45871559633027525,
						"acc_stderr,none": 0.021364122533881685,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.3101851851851852,
						"acc_stderr,none": 0.03154696285656628,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5098039215686274,
						"acc_stderr,none": 0.035086373586305716,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6160337552742616,
						"acc_stderr,none": 0.031658678064106674,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.452914798206278,
						"acc_stderr,none": 0.03340867501923323,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.549618320610687,
						"acc_stderr,none": 0.04363643698524779,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.37577045696068007,
						"acc_stderr,none": 0.09253127657638455,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.47107438016528924,
						"acc_stderr,none": 0.04556710331269498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4601226993865031,
						"acc_stderr,none": 0.039158572914369714,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.042466243366976235,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.049486373240266376,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5641025641025641,
						"acc_stderr,none": 0.03248577511578401,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.49680715197956576,
						"acc_stderr,none": 0.01787959894593307,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3786127167630058,
						"acc_stderr,none": 0.026113749361310338,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.25027932960893856,
						"acc_stderr,none": 0.014487500852850412,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.35947712418300654,
						"acc_stderr,none": 0.027475969910660952,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.42484711940778885,
						"acc_stderr,none": 0.06195545679428354,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3311897106109325,
						"acc_stderr,none": 0.026730620728004913,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.02764847787741332,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3049645390070922,
						"acc_stderr,none": 0.027464708442022135,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.34028683181225555,
						"acc_stderr,none": 0.012101217610223768,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3897058823529412,
						"acc_stderr,none": 0.029624663581159696,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.01972205893961806,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.43636363636363634,
						"acc_stderr,none": 0.04750185058907296,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4163265306122449,
						"acc_stderr,none": 0.03155782816556164,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4192395190120247,
						"acc_stderr,none": 0.0627096382761993,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5124378109452736,
						"acc_stderr,none": 0.0353443984853958,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.34443387250237867,
						"acc_stderr,none": 0.06833100082079437,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.036643147772880864,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5029239766081871,
						"acc_stderr,none": 0.03834759370936839,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.41264586714476936,
						"acc_stderr,none": 0.014276984290395416,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.7300710339384373,
						"acc_stderr,none": 0.012476433372002617,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "RWKV/v5-EagleX-v2-7B-HF"
	},
	"RWKV/v6-Finch-14B-HF": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6434611048478016,
						"acc_norm,none": 0.6474069898534386,
						"acc_norm_stderr,none": 0.08294267128829642,
						"acc_stderr,none": 0.1021974672609882,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4184375,
						"acc_stderr,none": 0.02377134678599452,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8320746268656717,
						"acc_stderr,none": 0.15005739769252588,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.4658090139872216,
						"acc_norm,none": 0.4658090139872216,
						"acc_norm_stderr,none": 0.10573508008788689,
						"acc_stderr,none": 0.10573508008788689,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6505181038589805,
						"acc_stderr,none": 0.013284051575929365,
						"alias": "glue",
						"f1,none": 0.7302521829828232,
						"f1_stderr,none": 0.00010785969379177274,
						"mcc,none": 0.021372296856483588,
						"mcc_stderr,none": 0.03184807423340848
					},
					"lambada": {
						"acc,none": 0.740830584125752,
						"acc_stderr,none": 0.013100405567493572,
						"alias": "lambada",
						"perplexity,none": 3.2969303534251133,
						"perplexity_stderr,none": 0.14931021471135678
					},
					"lambada_multilingual": {
						"acc,none": 0.5691830001940618,
						"acc_stderr,none": 0.08201479170677795,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.88833298599082,
						"perplexity_stderr,none": 6.499820664740463
					},
					"mmlu": {
						"acc,none": 0.523572140720695,
						"acc_stderr,none": 0.12685070914656993,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.48310308182784273,
						"acc_stderr,none": 0.14724337638558094,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.5954296749275829,
						"acc_stderr,none": 0.0883142346602916,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.5983100422489438,
						"acc_stderr,none": 0.09511201682945745,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.44021566761814146,
						"acc_stderr,none": 0.11380993093278662,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.42707142857142855,
						"acc_stderr,none": 0.060223946611407994,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7702514701611791,
						"acc_norm,none": 0.6508724136223338,
						"acc_norm_stderr,none": 0.008770227594569864,
						"acc_stderr,none": 0.14777088013311926,
						"alias": "pythia",
						"bits_per_byte,none": 0.6026420823429972,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5184949202602755,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.0242505065529364,
						"perplexity_stderr,none": 0.0550133363375002,
						"word_perplexity,none": 9.33466849451017,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3862826440381463,
						"acc_stderr,none": 0.001397113804422153,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.4369645042839657,
						"bleu_acc_stderr,none": 0.017363844503195974,
						"bleu_diff,none": -1.993836339002286,
						"bleu_diff_stderr,none": 0.9263067134836416,
						"bleu_max,none": 28.649825911043322,
						"bleu_max_stderr,none": 0.8275912248060677,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.017369236164404434,
						"rouge1_diff,none": -1.7142257235369747,
						"rouge1_diff_stderr,none": 1.0706172290271831,
						"rouge1_max,none": 54.97643298355719,
						"rouge1_max_stderr,none": 0.8584482932194394,
						"rouge2_acc,none": 0.3561811505507956,
						"rouge2_acc_stderr,none": 0.01676379072844633,
						"rouge2_diff,none": -3.2576316639215595,
						"rouge2_diff_stderr,none": 1.2592548519807907,
						"rouge2_max,none": 39.32502950857435,
						"rouge2_max_stderr,none": 1.0500412403817407,
						"rougeL_acc,none": 0.4186046511627907,
						"rougeL_acc_stderr,none": 0.01727001528447686,
						"rougeL_diff,none": -1.8018984569913754,
						"rougeL_diff_stderr,none": 1.0836529851359833,
						"rougeL_max,none": 52.02015692571969,
						"rougeL_max_stderr,none": 0.8751242211488833
					},
					"xcopa": {
						"acc,none": 0.6434545454545454,
						"acc_stderr,none": 0.07616959540381336,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4452208835341365,
						"acc_stderr,none": 0.050895952225549296,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6636784790325492,
						"acc_stderr,none": 0.05965518629992475,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8332209485277591,
						"acc_stderr,none": 0.0365370654435725,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6434611048478016,
						"acc_norm,none": 0.6474069898534386,
						"acc_norm_stderr,none": 0.08294267128829642,
						"acc_stderr,none": 0.1021974672609882,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4184375,
						"acc_stderr,none": 0.02377134678599452,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.458,
						"acc_stderr,none": 0.015763390640483703,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.015486634102858922,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4025,
						"acc_stderr,none": 0.01416257770226319,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4274744027303754,
						"acc_norm,none": 0.4726962457337884,
						"acc_norm_stderr,none": 0.014589589101986,
						"acc_stderr,none": 0.014456862944650649,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.75,
						"acc_norm,none": 0.7335858585858586,
						"acc_norm_stderr,none": 0.009071357971078687,
						"acc_stderr,none": 0.008885233166386385,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8320746268656717,
						"acc_stderr,none": 0.15005739769252588,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565834,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578154,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511963,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592085,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.013394902889660013,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.015537226438634604,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.01035486471293671,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689102,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.00696042006257142,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697057,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.00682976175614092,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524296,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523705,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280311,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307799,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.01301973553930782,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319329,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832023,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511961,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329814,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.261,
						"acc_stderr,none": 0.01389503767796513,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787738,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096916,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.685,
						"acc_stderr,none": 0.014696631960792496,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541643,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333342,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651506,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897896,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240663,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932575,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381802,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.015349091002225349,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.015349091002225347,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008222,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440474,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785126,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.01260773393417531,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033846,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578104,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904609,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264632,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257944,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306482,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676769,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.014876872027456734,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.015818793703510883,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304412,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.015149042659306626,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541651,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665546,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286421,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.00539714082909918,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274702,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224479,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.252,
						"acc_stderr,none": 0.013736254390651155,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.4658090139872216,
						"acc_norm,none": 0.4658090139872216,
						"acc_norm_stderr,none": 0.10573508008788689,
						"acc_stderr,none": 0.10573508008788689,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.42011834319526625,
						"acc_norm,none": 0.42011834319526625,
						"acc_norm_stderr,none": 0.03808034433196808,
						"acc_stderr,none": 0.03808034433196808,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3310810810810811,
						"acc_norm,none": 0.3310810810810811,
						"acc_norm_stderr,none": 0.03881461247660828,
						"acc_stderr,none": 0.03881461247660828,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.31097560975609756,
						"acc_norm,none": 0.31097560975609756,
						"acc_norm_stderr,none": 0.03625656529444608,
						"acc_stderr,none": 0.03625656529444608,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.6375,
						"acc_norm,none": 0.6375,
						"acc_norm_stderr,none": 0.038123743406448904,
						"acc_stderr,none": 0.038123743406448904,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3515151515151515,
						"acc_norm,none": 0.3515151515151515,
						"acc_norm_stderr,none": 0.0372820699868265,
						"acc_stderr,none": 0.0372820699868265,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.5023923444976076,
						"acc_norm,none": 0.5023923444976076,
						"acc_norm_stderr,none": 0.03466836542150577,
						"acc_stderr,none": 0.03466836542150577,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.03912553875691511,
						"acc_stderr,none": 0.03912553875691511,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.5648854961832062,
						"acc_norm,none": 0.5648854961832062,
						"acc_norm_stderr,none": 0.04348208051644858,
						"acc_stderr,none": 0.04348208051644858,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.4117647058823529,
						"acc_norm,none": 0.4117647058823529,
						"acc_norm_stderr,none": 0.04235778234253509,
						"acc_stderr,none": 0.04235778234253509,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5607476635514018,
						"acc_norm,none": 0.5607476635514018,
						"acc_norm_stderr,none": 0.048204529006379074,
						"acc_stderr,none": 0.048204529006379074,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.5603715170278638,
						"acc_norm,none": 0.5603715170278638,
						"acc_norm_stderr,none": 0.02766005258680519,
						"acc_stderr,none": 0.02766005258680519,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.37254901960784315,
						"acc_norm,none": 0.37254901960784315,
						"acc_norm_stderr,none": 0.03393388584958404,
						"acc_stderr,none": 0.03393388584958404,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5586592178770949,
						"acc_norm,none": 0.5586592178770949,
						"acc_norm_stderr,none": 0.037217784210805266,
						"acc_stderr,none": 0.037217784210805266,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.4345991561181435,
						"acc_norm,none": 0.4345991561181435,
						"acc_norm_stderr,none": 0.03226759995510145,
						"acc_stderr,none": 0.03226759995510145,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.616822429906542,
						"acc_norm,none": 0.616822429906542,
						"acc_norm_stderr,none": 0.04722013080771233,
						"acc_stderr,none": 0.04722013080771233,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4339622641509434,
						"acc_norm,none": 0.4339622641509434,
						"acc_norm_stderr,none": 0.04836754297823818,
						"acc_stderr,none": 0.04836754297823818,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.37037037037037035,
						"acc_norm,none": 0.37037037037037035,
						"acc_norm_stderr,none": 0.04668408033024931,
						"acc_stderr,none": 0.04668408033024931,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.42452830188679247,
						"acc_norm,none": 0.42452830188679247,
						"acc_norm_stderr,none": 0.0482359303724347,
						"acc_stderr,none": 0.0482359303724347,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.39194139194139194,
						"acc_norm,none": 0.39194139194139194,
						"acc_norm_stderr,none": 0.0296004854655413,
						"acc_stderr,none": 0.0296004854655413,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5049019607843137,
						"acc_norm,none": 0.5049019607843137,
						"acc_norm_stderr,none": 0.03509143375606786,
						"acc_stderr,none": 0.03509143375606786,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.6023391812865497,
						"acc_norm,none": 0.6023391812865497,
						"acc_norm_stderr,none": 0.0375363895576169,
						"acc_stderr,none": 0.0375363895576169,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.5102040816326531,
						"acc_norm,none": 0.5102040816326531,
						"acc_norm_stderr,none": 0.04137167622853999,
						"acc_stderr,none": 0.04137167622853999,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.31654676258992803,
						"acc_norm,none": 0.31654676258992803,
						"acc_norm_stderr,none": 0.039594402847357935,
						"acc_stderr,none": 0.039594402847357935,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.4276729559748428,
						"acc_norm,none": 0.4276729559748428,
						"acc_norm_stderr,none": 0.03935949201960897,
						"acc_stderr,none": 0.03935949201960897,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5337423312883436,
						"acc_norm,none": 0.5337423312883436,
						"acc_norm_stderr,none": 0.039194155450484096,
						"acc_stderr,none": 0.039194155450484096,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4127906976744186,
						"acc_norm,none": 0.4127906976744186,
						"acc_norm_stderr,none": 0.037649859438232,
						"acc_stderr,none": 0.037649859438232,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.43253968253968256,
						"acc_norm,none": 0.43253968253968256,
						"acc_norm_stderr,none": 0.031271150966052506,
						"acc_stderr,none": 0.031271150966052506,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.03547601494006937,
						"acc_stderr,none": 0.03547601494006937,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6680672268907563,
						"acc_norm,none": 0.6680672268907563,
						"acc_norm_stderr,none": 0.03058869701378364,
						"acc_stderr,none": 0.03058869701378364,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.36086956521739133,
						"acc_norm,none": 0.36086956521739133,
						"acc_norm_stderr,none": 0.031735996266496556,
						"acc_stderr,none": 0.031735996266496556,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.43703703703703706,
						"acc_norm,none": 0.43703703703703706,
						"acc_norm_stderr,none": 0.042849586397533994,
						"acc_stderr,none": 0.042849586397533994,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.041785323616083794,
						"acc_stderr,none": 0.041785323616083794,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.4715909090909091,
						"acc_norm,none": 0.4715909090909091,
						"acc_norm_stderr,none": 0.03773538873741875,
						"acc_stderr,none": 0.03773538873741875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.5436241610738255,
						"acc_norm,none": 0.5436241610738255,
						"acc_norm_stderr,none": 0.0409430168096717,
						"acc_stderr,none": 0.0409430168096717,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.4378698224852071,
						"acc_norm,none": 0.4378698224852071,
						"acc_norm_stderr,none": 0.03827686117539365,
						"acc_stderr,none": 0.03827686117539365,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03986246938961659,
						"acc_stderr,none": 0.03986246938961659,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.5338983050847458,
						"acc_norm,none": 0.5338983050847458,
						"acc_norm_stderr,none": 0.046118660119488855,
						"acc_stderr,none": 0.046118660119488855,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.31097560975609756,
						"acc_norm,none": 0.31097560975609756,
						"acc_norm_stderr,none": 0.0362565652944461,
						"acc_stderr,none": 0.0362565652944461,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.35454545454545455,
						"acc_norm,none": 0.35454545454545455,
						"acc_norm_stderr,none": 0.04582004841505415,
						"acc_stderr,none": 0.04582004841505415,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.5734265734265734,
						"acc_norm,none": 0.5734265734265734,
						"acc_norm_stderr,none": 0.04150416051729389,
						"acc_stderr,none": 0.04150416051729389,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.5079365079365079,
						"acc_norm,none": 0.5079365079365079,
						"acc_norm_stderr,none": 0.044715725362943486,
						"acc_stderr,none": 0.044715725362943486,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3567567567567568,
						"acc_norm,none": 0.3567567567567568,
						"acc_norm_stderr,none": 0.035315455206482514,
						"acc_stderr,none": 0.035315455206482514,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5232558139534884,
						"acc_norm,none": 0.5232558139534884,
						"acc_norm_stderr,none": 0.03819457472859224,
						"acc_stderr,none": 0.03819457472859224,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.46228710462287104,
						"acc_norm,none": 0.46228710462287104,
						"acc_norm_stderr,none": 0.024622899193402718,
						"acc_stderr,none": 0.024622899193402718,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.780373831775701,
						"acc_norm,none": 0.780373831775701,
						"acc_norm_stderr,none": 0.028366358642017562,
						"acc_stderr,none": 0.028366358642017562,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4796747967479675,
						"acc_norm,none": 0.4796747967479675,
						"acc_norm_stderr,none": 0.04523045598338889,
						"acc_stderr,none": 0.04523045598338889,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.45081967213114754,
						"acc_norm,none": 0.45081967213114754,
						"acc_norm_stderr,none": 0.04523412879516007,
						"acc_stderr,none": 0.04523412879516007,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5095238095238095,
						"acc_norm,none": 0.5095238095238095,
						"acc_norm_stderr,none": 0.034579448570031264,
						"acc_stderr,none": 0.034579448570031264,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5166666666666667,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.03735098678123468,
						"acc_stderr,none": 0.03735098678123468,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.6137566137566137,
						"acc_norm,none": 0.6137566137566137,
						"acc_norm_stderr,none": 0.03550992016224306,
						"acc_stderr,none": 0.03550992016224306,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.04524657153808439,
						"acc_stderr,none": 0.04524657153808439,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.43448275862068964,
						"acc_norm,none": 0.43448275862068964,
						"acc_norm_stderr,none": 0.041307408795554966,
						"acc_stderr,none": 0.041307408795554966,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.04852615860619701,
						"acc_stderr,none": 0.04852615860619701,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.5085714285714286,
						"acc_norm,none": 0.5085714285714286,
						"acc_norm_stderr,none": 0.0378993320697706,
						"acc_stderr,none": 0.0378993320697706,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3033175355450237,
						"acc_norm,none": 0.3033175355450237,
						"acc_norm_stderr,none": 0.03172170716716874,
						"acc_stderr,none": 0.03172170716716874,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.30319148936170215,
						"acc_norm,none": 0.30319148936170215,
						"acc_norm_stderr,none": 0.02373556600773539,
						"acc_stderr,none": 0.02373556600773539,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.5129310344827587,
						"acc_norm,none": 0.5129310344827587,
						"acc_norm_stderr,none": 0.03288658118872005,
						"acc_stderr,none": 0.03288658118872005,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.03799168868945868,
						"acc_stderr,none": 0.03799168868945868,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.45925925925925926,
						"acc_norm,none": 0.45925925925925926,
						"acc_norm_stderr,none": 0.04304979692464241,
						"acc_stderr,none": 0.04304979692464241,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03333333333333334,
						"acc_stderr,none": 0.03333333333333334,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.47878787878787876,
						"acc_norm,none": 0.47878787878787876,
						"acc_norm_stderr,none": 0.03900828913737301,
						"acc_stderr,none": 0.03900828913737301,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.34594594594594597,
						"acc_norm,none": 0.34594594594594597,
						"acc_norm_stderr,none": 0.035067276058462,
						"acc_stderr,none": 0.035067276058462,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5325443786982249,
						"acc_norm,none": 0.5325443786982249,
						"acc_norm_stderr,none": 0.03849403659869391,
						"acc_stderr,none": 0.03849403659869391,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.6645962732919255,
						"acc_norm,none": 0.6645962732919255,
						"acc_norm_stderr,none": 0.03732526513790708,
						"acc_stderr,none": 0.03732526513790708,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.58125,
						"acc_norm,none": 0.58125,
						"acc_norm_stderr,none": 0.039125538756915115,
						"acc_stderr,none": 0.039125538756915115,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.021372296856483588,
						"mcc_stderr,none": 0.03184807423340848
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6505181038589805,
						"acc_stderr,none": 0.013284051575929365,
						"alias": "glue",
						"f1,none": 0.7302521829828232,
						"f1_stderr,none": 0.00010785969379177274,
						"mcc,none": 0.021372296856483588,
						"mcc_stderr,none": 0.03184807423340848
					},
					"hellaswag": {
						"acc,none": 0.5876319458275244,
						"acc_norm,none": 0.7875921131248755,
						"acc_norm_stderr,none": 0.0040817604652902,
						"acc_stderr,none": 0.004912547040132879,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.740830584125752,
						"acc_stderr,none": 0.013100405567493572,
						"alias": "lambada",
						"perplexity,none": 3.2969303534251133,
						"perplexity_stderr,none": 0.14931021471135678
					},
					"lambada_multilingual": {
						"acc,none": 0.5691830001940618,
						"acc_stderr,none": 0.08201479170677795,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.88833298599082,
						"perplexity_stderr,none": 6.499820664740463
					},
					"lambada_openai": {
						"acc,none": 0.7640209586648554,
						"acc_stderr,none": 0.00591563575509615,
						"alias": " - lambada_openai",
						"perplexity,none": 3.0242505065529364,
						"perplexity_stderr,none": 0.0550133363375002
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4593440714147099,
						"acc_stderr,none": 0.00694291127539764,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 27.639390308113263,
						"perplexity_stderr,none": 1.5067206876614696
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7647972055113527,
						"acc_stderr,none": 0.00590889751702722,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.0261779498879404,
						"perplexity_stderr,none": 0.0551151972930069
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4810789831166311,
						"acc_stderr,none": 0.006960988162299351,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 23.16831040580189,
						"perplexity_stderr,none": 1.109390265665687
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5895594799146129,
						"acc_stderr,none": 0.006853319847090055,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 13.273666136682731,
						"perplexity_stderr,none": 0.6318862023483438
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5511352610130021,
						"acc_stderr,none": 0.006929452414790844,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 17.334120129468296,
						"perplexity_stderr,none": 0.9070774470877253
					},
					"lambada_standard": {
						"acc,none": 0.7176402095866485,
						"acc_stderr,none": 0.006271434350629785,
						"alias": " - lambada_standard",
						"perplexity,none": 3.5683125033243037,
						"perplexity_stderr,none": 0.06872098766232015
					},
					"logiqa": {
						"acc,none": 0.2642089093701997,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.01771924779845829,
						"acc_stderr,none": 0.01729395454974451,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.523572140720695,
						"acc_stderr,none": 0.12685070914656993,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.0429259671825698,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5197368421052632,
						"acc_stderr,none": 0.04065771002562605,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.04975698519562426,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.569811320754717,
						"acc_stderr,none": 0.030471445867183238,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.5763888888888888,
						"acc_stderr,none": 0.04132125019723369,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5144508670520231,
						"acc_stderr,none": 0.03810871630454764,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808779,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.451063829787234,
						"acc_stderr,none": 0.032529096196131965,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04434600701584926,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5241379310344828,
						"acc_stderr,none": 0.0416180850350153,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.335978835978836,
						"acc_stderr,none": 0.024326310529149152,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.042163702135578345,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.6709677419354839,
						"acc_stderr,none": 0.026729499068349954,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.39901477832512317,
						"acc_stderr,none": 0.03445487686264715,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7090909090909091,
						"acc_stderr,none": 0.03546563019624336,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.6868686868686869,
						"acc_stderr,none": 0.033042050878136525,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7357512953367875,
						"acc_stderr,none": 0.03182155050916646,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.46923076923076923,
						"acc_stderr,none": 0.025302958890850154,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.028037929969115007,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.49159663865546216,
						"acc_stderr,none": 0.0324739027656967,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.33112582781456956,
						"acc_stderr,none": 0.038425817186598696,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.6935779816513762,
						"acc_stderr,none": 0.019765517220458523,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.032568505702936464,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7009803921568627,
						"acc_stderr,none": 0.03213325717373618,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7215189873417721,
						"acc_stderr,none": 0.029178682304842548,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6188340807174888,
						"acc_stderr,none": 0.032596251184168284,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6335877862595419,
						"acc_stderr,none": 0.04225875451969637,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.48310308182784273,
						"acc_stderr,none": 0.14724337638558094,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6859504132231405,
						"acc_stderr,none": 0.04236964753041019,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.6759259259259259,
						"acc_stderr,none": 0.045245960070300476,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.656441717791411,
						"acc_stderr,none": 0.03731133519673893,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.44642857142857145,
						"acc_stderr,none": 0.04718471485219588,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5922330097087378,
						"acc_stderr,none": 0.048657775704107696,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7735042735042735,
						"acc_stderr,none": 0.027421007295392912,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695237,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7062579821200511,
						"acc_stderr,none": 0.016287759388491672,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.5780346820809249,
						"acc_stderr,none": 0.02658923114217426,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.20558659217877095,
						"acc_stderr,none": 0.013516116210724202,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.5588235294117647,
						"acc_stderr,none": 0.028431095444176643,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.5954296749275829,
						"acc_stderr,none": 0.0883142346602916,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.639871382636656,
						"acc_stderr,none": 0.02726429759980402,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5987654320987654,
						"acc_stderr,none": 0.0272725828498398,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.4219858156028369,
						"acc_stderr,none": 0.029462189233370597,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.4132985658409387,
						"acc_stderr,none": 0.012576779494860088,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.5588235294117647,
						"acc_stderr,none": 0.030161911930767102,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5343137254901961,
						"acc_stderr,none": 0.020180144843307296,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6090909090909091,
						"acc_stderr,none": 0.04673752333670237,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.5755102040816327,
						"acc_stderr,none": 0.031642094879429414,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.5983100422489438,
						"acc_stderr,none": 0.09511201682945745,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7562189054726368,
						"acc_stderr,none": 0.03036049015401464,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.44021566761814146,
						"acc_stderr,none": 0.11380993093278662,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.03887971849597264,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.7368421052631579,
						"acc_stderr,none": 0.03377310252209205,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4694854814060112,
						"acc_stderr,none": 0.005037750946507588,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4627746135069162,
						"acc_stderr,none": 0.005028797859706236,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7377450980392157,
						"acc_stderr,none": 0.021803076015336122,
						"alias": " - mrpc",
						"f1,none": 0.8381240544629349,
						"f1_stderr,none": 0.015439715144738532
					},
					"openbookqa": {
						"acc,none": 0.332,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.02227969410784342,
						"acc_stderr,none": 0.021081766571222852,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.010928939603659161,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3185,
						"acc_stderr,none": 0.010420324687037763,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.357,
						"acc_stderr,none": 0.010716012152766246,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5235,
						"acc_stderr,none": 0.011170777418517833,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.539,
						"acc_stderr,none": 0.011149065020234333,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.011069813475627662,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4285,
						"acc_stderr,none": 0.011068203447885417,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.42707142857142855,
						"acc_stderr,none": 0.060223946611407994,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7959738846572362,
						"acc_norm,none": 0.7997823721436343,
						"acc_norm_stderr,none": 0.009336465387350843,
						"acc_stderr,none": 0.009402378102942638,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7702514701611791,
						"acc_norm,none": 0.6508724136223338,
						"acc_norm_stderr,none": 0.008770227594569864,
						"acc_stderr,none": 0.14777088013311926,
						"alias": "pythia",
						"bits_per_byte,none": 0.6026420823429972,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5184949202602755,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.0242505065529364,
						"perplexity_stderr,none": 0.0550133363375002,
						"word_perplexity,none": 9.33466849451017,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5136371956800293,
						"acc_stderr,none": 0.0067628937147980635,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.7530051941627505,
						"acc_stderr,none": 0.002144847388853043,
						"alias": " - qqp",
						"f1,none": 0.7293180093245148,
						"f1_stderr,none": 0.002612735124366855
					},
					"record": {
						"alias": "record",
						"em,none": 0.2707,
						"em_stderr,none": 0.0044434362167740035,
						"f1,none": 0.27995190498828887,
						"f1_stderr,none": 0.004453325023793155
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217458,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.946,
						"acc_norm,none": 0.913,
						"acc_norm_stderr,none": 0.00891686663074588,
						"acc_stderr,none": 0.007150883521295439,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.893348623853211,
						"acc_stderr,none": 0.010458867008246884,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3862826440381463,
						"acc_stderr,none": 0.001397113804422153,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.4369645042839657,
						"bleu_acc_stderr,none": 0.017363844503195974,
						"bleu_diff,none": -1.993836339002286,
						"bleu_diff_stderr,none": 0.9263067134836416,
						"bleu_max,none": 28.649825911043322,
						"bleu_max_stderr,none": 0.8275912248060677,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.017369236164404434,
						"rouge1_diff,none": -1.7142257235369747,
						"rouge1_diff_stderr,none": 1.0706172290271831,
						"rouge1_max,none": 54.97643298355719,
						"rouge1_max_stderr,none": 0.8584482932194394,
						"rouge2_acc,none": 0.3561811505507956,
						"rouge2_acc_stderr,none": 0.01676379072844633,
						"rouge2_diff,none": -3.2576316639215595,
						"rouge2_diff_stderr,none": 1.2592548519807907,
						"rouge2_max,none": 39.32502950857435,
						"rouge2_max_stderr,none": 1.0500412403817407,
						"rougeL_acc,none": 0.4186046511627907,
						"rougeL_acc_stderr,none": 0.01727001528447686,
						"rougeL_diff,none": -1.8018984569913754,
						"rougeL_diff_stderr,none": 1.0836529851359833,
						"rougeL_max,none": 52.02015692571969,
						"rougeL_max_stderr,none": 0.8751242211488833
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.4369645042839657,
						"bleu_acc_stderr,none": 0.017363844503195974,
						"bleu_diff,none": -1.993836339002286,
						"bleu_diff_stderr,none": 0.9263067134836416,
						"bleu_max,none": 28.649825911043322,
						"bleu_max_stderr,none": 0.8275912248060677,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.017369236164404434,
						"rouge1_diff,none": -1.7142257235369747,
						"rouge1_diff_stderr,none": 1.0706172290271831,
						"rouge1_max,none": 54.97643298355719,
						"rouge1_max_stderr,none": 0.8584482932194394,
						"rouge2_acc,none": 0.3561811505507956,
						"rouge2_acc_stderr,none": 0.01676379072844633,
						"rouge2_diff,none": -3.2576316639215595,
						"rouge2_diff_stderr,none": 1.2592548519807907,
						"rouge2_max,none": 39.32502950857435,
						"rouge2_max_stderr,none": 1.0500412403817407,
						"rougeL_acc,none": 0.4186046511627907,
						"rougeL_acc_stderr,none": 0.01727001528447686,
						"rougeL_diff,none": -1.8018984569913754,
						"rougeL_diff_stderr,none": 1.0836529851359833,
						"rougeL_max,none": 52.02015692571969,
						"rougeL_max_stderr,none": 0.8751242211488833
					},
					"truthfulqa_mc1": {
						"acc,none": 0.3182374541003672,
						"acc_stderr,none": 0.016305988648920626,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4543278339759253,
						"acc_stderr,none": 0.0145651452120705,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6026420823429972,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5184949202602755,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.33466849451017,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7103393843725335,
						"acc_stderr,none": 0.012748550807638261,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6434545454545454,
						"acc_stderr,none": 0.07616959540381336,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.021613289165165788,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.02231133324528966,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747605,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.01922673489361458,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689253,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.0210496121661348,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.019435727282249547,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.020011219298073528,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4452208835341365,
						"acc_stderr,none": 0.050895952225549296,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667057,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46947791164658637,
						"acc_stderr,none": 0.010003382355314755,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4995983935742972,
						"acc_stderr,none": 0.010022069634353847,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41285140562248995,
						"acc_stderr,none": 0.009868665943084417,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5445783132530121,
						"acc_stderr,none": 0.009982161147576343,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5044176706827309,
						"acc_stderr,none": 0.01002168168176934,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4987951807228916,
						"acc_stderr,none": 0.010022043771315568,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4457831325301205,
						"acc_stderr,none": 0.009962979511168334,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807708,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41927710843373495,
						"acc_stderr,none": 0.00989059913739193,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.009811284026425582,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.009999776793187632,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.43453815261044176,
						"acc_stderr,none": 0.009935807354856828,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480236,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3530120481927711,
						"acc_stderr,none": 0.009579225840709717,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6636784790325492,
						"acc_stderr,none": 0.05965518629992475,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6505625413633356,
						"acc_stderr,none": 0.012269893190222854,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.8067504963600265,
						"acc_stderr,none": 0.010161090993950328,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7372600926538716,
						"acc_stderr,none": 0.011326217854313123,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5976174718729318,
						"acc_stderr,none": 0.012619516819528716,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6505625413633356,
						"acc_stderr,none": 0.012269893190222852,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6896095301125083,
						"acc_stderr,none": 0.01190604015249926,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5704831237590999,
						"acc_stderr,none": 0.012738639381353997,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.7246856386499008,
						"acc_stderr,none": 0.011494783262044592,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5777630708140304,
						"acc_stderr,none": 0.012710555263676445,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.6254136333553938,
						"acc_stderr,none": 0.012455787254852482,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6697551290536069,
						"acc_stderr,none": 0.012102848336416566,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8332209485277591,
						"acc_stderr,none": 0.0365370654435725,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8890322580645161,
						"acc_stderr,none": 0.006515369825999424,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7601668404588112,
						"acc_stderr,none": 0.01379514661279667,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7946768060836502,
						"acc_stderr,none": 0.02495534790673793,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.025821691360354258,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8333333333333334,
						"acc_stderr,none": 0.016616890547541167,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/v6-Finch-14B-HF"
	},
	"RWKV/v6-Finch-7B-HF": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6302142051860203,
						"acc_norm,none": 0.6206313416009019,
						"acc_norm_stderr,none": 0.08598597548576482,
						"acc_stderr,none": 0.10199410534961573,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.016792006145472556,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8387313432835821,
						"acc_stderr,none": 0.14599258700673573,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.39595924710758074,
						"acc_norm,none": 0.39595924710758074,
						"acc_norm_stderr,none": 0.08795998481795249,
						"acc_stderr,none": 0.08795998481795249,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6257295140543115,
						"acc_stderr,none": 0.023735154848757798,
						"alias": "glue",
						"f1,none": 0.742083944776421,
						"f1_stderr,none": 9.454186854996372e-05,
						"mcc,none": -0.013195212948657779,
						"mcc_stderr,none": 0.0305140596748652
					},
					"lambada": {
						"acc,none": 0.7209392586842616,
						"acc_stderr,none": 0.0157456787378409,
						"alias": "lambada",
						"perplexity,none": 3.669557605771887,
						"perplexity_stderr,none": 0.20390506070459138
					},
					"lambada_multilingual": {
						"acc,none": 0.5455074713758976,
						"acc_stderr,none": 0.08392868908575618,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.902184608666243,
						"perplexity_stderr,none": 7.729128475109806
					},
					"mmlu": {
						"acc,none": 0.41760432986754026,
						"acc_stderr,none": 0.09366544229867867,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.40403825717322,
						"acc_stderr,none": 0.09977027072687186,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.47119407788863854,
						"acc_stderr,none": 0.07659522798332954,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46441338966525836,
						"acc_stderr,none": 0.07277213943118718,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3393593403108151,
						"acc_stderr,none": 0.08406650611477154,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.44421428571428573,
						"acc_stderr,none": 0.05735734719146903,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7566790434622362,
						"acc_norm,none": 0.624648872760461,
						"acc_norm_stderr,none": 0.009371239164298838,
						"acc_stderr,none": 0.14297577266995612,
						"alias": "pythia",
						"bits_per_byte,none": 0.623440267803835,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5405444052770116,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2888187363017827,
						"perplexity_stderr,none": 0.0630081192601602,
						"word_perplexity,none": 10.082741516254622,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.34821501412609085,
						"acc_stderr,none": 0.0015963359550631878,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.41982864137086906,
						"bleu_acc_stderr,none": 0.017277030301775766,
						"bleu_diff,none": -0.29463271377209205,
						"bleu_diff_stderr,none": 0.988349254929596,
						"bleu_max,none": 31.39369421270807,
						"bleu_max_stderr,none": 0.8290875013034128,
						"rouge1_acc,none": 0.390452876376989,
						"rouge1_acc_stderr,none": 0.017078230743431445,
						"rouge1_diff,none": 0.3256498356161678,
						"rouge1_diff_stderr,none": 1.178912669156009,
						"rouge1_max,none": 57.3629241771768,
						"rouge1_max_stderr,none": 0.864246963557567,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.016750862381375905,
						"rouge2_diff,none": -0.7611996177378746,
						"rouge2_diff_stderr,none": 1.3710202955405815,
						"rouge2_max,none": 42.46270508573572,
						"rouge2_max_stderr,none": 1.0577136192667103,
						"rougeL_acc,none": 0.38555691554467564,
						"rougeL_acc_stderr,none": 0.017038839010591677,
						"rougeL_diff,none": 0.31681892702599923,
						"rougeL_diff_stderr,none": 1.1938175647429725,
						"rougeL_max,none": 54.57872608854838,
						"rougeL_max_stderr,none": 0.889186741565874
					},
					"xcopa": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.0671219201247907,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4419812583668005,
						"acc_stderr,none": 0.05072266385982506,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6400336923169484,
						"acc_stderr,none": 0.06170534462201266,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8098449089683075,
						"acc_stderr,none": 0.0385965554550919,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6302142051860203,
						"acc_norm,none": 0.6206313416009019,
						"acc_norm_stderr,none": 0.08598597548576482,
						"acc_stderr,none": 0.10199410534961573,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.35875,
						"acc_stderr,none": 0.016792006145472556,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.374,
						"acc_stderr,none": 0.015308767369006361,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.342,
						"acc_stderr,none": 0.015008706182121731,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.013862183574189902,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.41467576791808874,
						"acc_norm,none": 0.439419795221843,
						"acc_norm_stderr,none": 0.014503747823580123,
						"acc_stderr,none": 0.014397070564409172,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7365319865319865,
						"acc_norm,none": 0.710016835016835,
						"acc_norm_stderr,none": 0.009310840970769039,
						"acc_stderr,none": 0.009039157374497711,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8387313432835821,
						"acc_stderr,none": 0.14599258700673573,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745934,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574502999,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469343,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.01185662597789012,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661778,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858929,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.01371813351688892,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042974,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319421,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.0070881056172464475,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337066,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118756,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621223,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689089,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.01297583802196877,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274527,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406729,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.01177211037081219,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469339,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.015060472031706618,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785124,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117707,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.679,
						"acc_stderr,none": 0.014770821817934633,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559559,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333457,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139983,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970735,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011858,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.013736254390651155,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.01474640486547348,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.014721675438880227,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.00745483565040673,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177914,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499668,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454276,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163045,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298406,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397222,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.01297583802196877,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620667,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946568,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523719,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426496,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.645,
						"acc_stderr,none": 0.015139491543780534,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.431,
						"acc_stderr,none": 0.015667944488173508,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408037,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286425,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524308,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.00957536880165387,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.01180043432464459,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.00702462421381714,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178357,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727024,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.015380102325652711,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.312,
						"acc_stderr,none": 0.014658474370509008,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.39595924710758074,
						"acc_norm,none": 0.39595924710758074,
						"acc_norm_stderr,none": 0.08795998481795249,
						"acc_stderr,none": 0.08795998481795249,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.378698224852071,
						"acc_norm,none": 0.378698224852071,
						"acc_norm_stderr,none": 0.037423404262347686,
						"acc_stderr,none": 0.037423404262347686,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.31097560975609756,
						"acc_norm,none": 0.31097560975609756,
						"acc_norm_stderr,none": 0.03625656529444609,
						"acc_stderr,none": 0.03625656529444609,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.54375,
						"acc_norm,none": 0.54375,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.039500492593059405,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624336,
						"acc_stderr,none": 0.03546563019624336,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.44019138755980863,
						"acc_norm,none": 0.44019138755980863,
						"acc_norm_stderr,none": 0.03441984346875156,
						"acc_stderr,none": 0.03441984346875156,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.4961832061068702,
						"acc_norm,none": 0.4961832061068702,
						"acc_norm_stderr,none": 0.043851623256015534,
						"acc_stderr,none": 0.043851623256015534,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.4264705882352941,
						"acc_norm,none": 0.4264705882352941,
						"acc_norm_stderr,none": 0.04256528107076942,
						"acc_stderr,none": 0.04256528107076942,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5233644859813084,
						"acc_norm,none": 0.5233644859813084,
						"acc_norm_stderr,none": 0.048511241723296745,
						"acc_stderr,none": 0.048511241723296745,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.47987616099071206,
						"acc_norm,none": 0.47987616099071206,
						"acc_norm_stderr,none": 0.027841333447728496,
						"acc_stderr,none": 0.027841333447728496,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.03296245110172228,
						"acc_stderr,none": 0.03296245110172228,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.4581005586592179,
						"acc_norm,none": 0.4581005586592179,
						"acc_norm_stderr,none": 0.03734476760540699,
						"acc_stderr,none": 0.03734476760540699,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.3755274261603376,
						"acc_norm,none": 0.3755274261603376,
						"acc_norm_stderr,none": 0.03152256243091156,
						"acc_stderr,none": 0.03152256243091156,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.5794392523364486,
						"acc_norm,none": 0.5794392523364486,
						"acc_norm_stderr,none": 0.047947436351895946,
						"acc_stderr,none": 0.047947436351895946,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.32407407407407407,
						"acc_norm,none": 0.32407407407407407,
						"acc_norm_stderr,none": 0.04524596007030048,
						"acc_stderr,none": 0.04524596007030048,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714283,
						"acc_stderr,none": 0.04285714285714283,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199629,
						"acc_stderr,none": 0.04439263906199629,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.3516483516483517,
						"acc_norm,none": 0.3516483516483517,
						"acc_norm_stderr,none": 0.02895177204467835,
						"acc_stderr,none": 0.02895177204467835,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.4264705882352941,
						"acc_norm,none": 0.4264705882352941,
						"acc_norm_stderr,none": 0.03471157907953427,
						"acc_stderr,none": 0.03471157907953427,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.03811079669833531,
						"acc_stderr,none": 0.03811079669833531,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3673469387755102,
						"acc_norm,none": 0.3673469387755102,
						"acc_norm_stderr,none": 0.03989739969449137,
						"acc_stderr,none": 0.03989739969449137,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.34532374100719426,
						"acc_norm,none": 0.34532374100719426,
						"acc_norm_stderr,none": 0.04047501062151218,
						"acc_stderr,none": 0.04047501062151218,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.4339622641509434,
						"acc_norm,none": 0.4339622641509434,
						"acc_norm_stderr,none": 0.0394293967186222,
						"acc_stderr,none": 0.0394293967186222,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.48466257668711654,
						"acc_norm,none": 0.48466257668711654,
						"acc_norm_stderr,none": 0.03926522378708843,
						"acc_stderr,none": 0.03926522378708843,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.38372093023255816,
						"acc_norm,none": 0.38372093023255816,
						"acc_norm_stderr,none": 0.03718762118238795,
						"acc_stderr,none": 0.03718762118238795,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.029381354652032128,
						"acc_stderr,none": 0.029381354652032128,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.0347327959083696,
						"acc_stderr,none": 0.0347327959083696,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.5336134453781513,
						"acc_norm,none": 0.5336134453781513,
						"acc_norm_stderr,none": 0.03240501447690071,
						"acc_stderr,none": 0.03240501447690071,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.28695652173913044,
						"acc_norm,none": 0.28695652173913044,
						"acc_norm_stderr,none": 0.029891541673635464,
						"acc_stderr,none": 0.029891541673635464,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.35555555555555557,
						"acc_norm,none": 0.35555555555555557,
						"acc_norm_stderr,none": 0.04135176749720386,
						"acc_stderr,none": 0.04135176749720386,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4125874125874126,
						"acc_norm,none": 0.4125874125874126,
						"acc_norm_stderr,none": 0.041312876923923436,
						"acc_stderr,none": 0.041312876923923436,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.39204545454545453,
						"acc_norm,none": 0.39204545454545453,
						"acc_norm_stderr,none": 0.03690496026403127,
						"acc_stderr,none": 0.03690496026403127,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.3959731543624161,
						"acc_norm,none": 0.3959731543624161,
						"acc_norm_stderr,none": 0.040200377787210195,
						"acc_stderr,none": 0.040200377787210195,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.3254437869822485,
						"acc_norm,none": 0.3254437869822485,
						"acc_norm_stderr,none": 0.03614867847292204,
						"acc_stderr,none": 0.03614867847292204,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.26515151515151514,
						"acc_norm,none": 0.26515151515151514,
						"acc_norm_stderr,none": 0.03856650735812559,
						"acc_stderr,none": 0.03856650735812559,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3728813559322034,
						"acc_norm,none": 0.3728813559322034,
						"acc_norm_stderr,none": 0.04470614886582575,
						"acc_stderr,none": 0.04470614886582575,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.04389311454644286,
						"acc_stderr,none": 0.04389311454644286,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3916083916083916,
						"acc_norm,none": 0.3916083916083916,
						"acc_norm_stderr,none": 0.04096127157727561,
						"acc_stderr,none": 0.04096127157727561,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.4523809523809524,
						"acc_norm,none": 0.4523809523809524,
						"acc_norm_stderr,none": 0.044518079590553275,
						"acc_stderr,none": 0.044518079590553275,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3567567567567568,
						"acc_norm,none": 0.3567567567567568,
						"acc_norm_stderr,none": 0.035315455206482514,
						"acc_stderr,none": 0.035315455206482514,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.4127906976744186,
						"acc_norm,none": 0.4127906976744186,
						"acc_norm_stderr,none": 0.037649859438232,
						"acc_stderr,none": 0.037649859438232,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.34306569343065696,
						"acc_norm,none": 0.34306569343065696,
						"acc_norm_stderr,none": 0.02344540224423849,
						"acc_stderr,none": 0.02344540224423849,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.6822429906542056,
						"acc_norm,none": 0.6822429906542056,
						"acc_norm_stderr,none": 0.03190269039219335,
						"acc_stderr,none": 0.03190269039219335,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.34959349593495936,
						"acc_norm,none": 0.34959349593495936,
						"acc_norm_stderr,none": 0.04317120734620423,
						"acc_stderr,none": 0.04317120734620423,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3442622950819672,
						"acc_norm,none": 0.3442622950819672,
						"acc_norm_stderr,none": 0.04319337331204006,
						"acc_stderr,none": 0.04319337331204006,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.4142857142857143,
						"acc_norm,none": 0.4142857142857143,
						"acc_norm_stderr,none": 0.03407373521784954,
						"acc_stderr,none": 0.03407373521784954,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.4722222222222222,
						"acc_norm,none": 0.4722222222222222,
						"acc_norm_stderr,none": 0.037314037607574575,
						"acc_stderr,none": 0.037314037607574575,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.47619047619047616,
						"acc_norm,none": 0.47619047619047616,
						"acc_norm_stderr,none": 0.03642487945744188,
						"acc_stderr,none": 0.03642487945744188,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.35344827586206895,
						"acc_norm,none": 0.35344827586206895,
						"acc_norm_stderr,none": 0.044577494043928884,
						"acc_stderr,none": 0.044577494043928884,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3931034482758621,
						"acc_norm,none": 0.3931034482758621,
						"acc_norm_stderr,none": 0.0407032901370707,
						"acc_stderr,none": 0.0407032901370707,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.49523809523809526,
						"acc_norm,none": 0.49523809523809526,
						"acc_norm_stderr,none": 0.049026810195176226,
						"acc_stderr,none": 0.049026810195176226,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.4342857142857143,
						"acc_norm,none": 0.4342857142857143,
						"acc_norm_stderr,none": 0.037576101528126626,
						"acc_stderr,none": 0.037576101528126626,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3033175355450237,
						"acc_norm,none": 0.3033175355450237,
						"acc_norm_stderr,none": 0.03172170716716874,
						"acc_stderr,none": 0.03172170716716874,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2872340425531915,
						"acc_norm,none": 0.2872340425531915,
						"acc_norm_stderr,none": 0.023365538575816754,
						"acc_stderr,none": 0.023365538575816754,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.44396551724137934,
						"acc_norm,none": 0.44396551724137934,
						"acc_norm_stderr,none": 0.03269034414952844,
						"acc_stderr,none": 0.03269034414952844,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4425287356321839,
						"acc_norm,none": 0.4425287356321839,
						"acc_norm_stderr,none": 0.037762342756690645,
						"acc_stderr,none": 0.037762342756690645,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.4222222222222222,
						"acc_norm,none": 0.4222222222222222,
						"acc_norm_stderr,none": 0.04266763404099582,
						"acc_stderr,none": 0.04266763404099582,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.39823008849557523,
						"acc_norm,none": 0.39823008849557523,
						"acc_norm_stderr,none": 0.032635555601036786,
						"acc_stderr,none": 0.032635555601036786,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.40606060606060607,
						"acc_norm,none": 0.40606060606060607,
						"acc_norm_stderr,none": 0.03834816355401181,
						"acc_stderr,none": 0.03834816355401181,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.31891891891891894,
						"acc_norm,none": 0.31891891891891894,
						"acc_norm_stderr,none": 0.034358218597411866,
						"acc_stderr,none": 0.034358218597411866,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.4260355029585799,
						"acc_norm,none": 0.4260355029585799,
						"acc_norm_stderr,none": 0.038151425516134464,
						"acc_stderr,none": 0.038151425516134464,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.039190774733304186,
						"acc_stderr,none": 0.039190774733304186,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.49375,
						"acc_norm,none": 0.49375,
						"acc_norm_stderr,none": 0.03964948130713094,
						"acc_stderr,none": 0.03964948130713094,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.013195212948657779,
						"mcc_stderr,none": 0.0305140596748652
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.035887028128263714,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6257295140543115,
						"acc_stderr,none": 0.023735154848757798,
						"alias": "glue",
						"f1,none": 0.742083944776421,
						"f1_stderr,none": 9.454186854996372e-05,
						"mcc,none": -0.013195212948657779,
						"mcc_stderr,none": 0.0305140596748652
					},
					"hellaswag": {
						"acc,none": 0.5596494722166899,
						"acc_norm,none": 0.7518422624975104,
						"acc_norm_stderr,none": 0.0043106106168457085,
						"acc_stderr,none": 0.004954146286513351,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7209392586842616,
						"acc_stderr,none": 0.0157456787378409,
						"alias": "lambada",
						"perplexity,none": 3.669557605771887,
						"perplexity_stderr,none": 0.20390506070459138
					},
					"lambada_multilingual": {
						"acc,none": 0.5455074713758976,
						"acc_stderr,none": 0.08392868908575618,
						"alias": "lambada_multilingual",
						"perplexity,none": 19.902184608666243,
						"perplexity_stderr,none": 7.729128475109806
					},
					"lambada_openai": {
						"acc,none": 0.7490782068697845,
						"acc_stderr,none": 0.0060401099618007685,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2888187363017827,
						"perplexity_stderr,none": 0.0630081192601602
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4383854065592859,
						"acc_stderr,none": 0.006912884634249905,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 32.57015889836838,
						"perplexity_stderr,none": 1.8269054961149331
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7488841451581603,
						"acc_stderr,none": 0.006041662455556341,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.2901109811441964,
						"perplexity_stderr,none": 0.06309813807790049
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.45546283718222397,
						"acc_stderr,none": 0.00693828776972325,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 27.273365860251072,
						"perplexity_stderr,none": 1.340805602505681
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5614205317290899,
						"acc_stderr,none": 0.006913219825972168,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 15.606585514626458,
						"perplexity_stderr,none": 0.7593149529929747
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5233844362507277,
						"acc_stderr,none": 0.006958355049604452,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 20.77070178894111,
						"perplexity_stderr,none": 1.1062651491616315
					},
					"lambada_standard": {
						"acc,none": 0.6920240636522415,
						"acc_stderr,none": 0.006431778256505183,
						"alias": " - lambada_standard",
						"perplexity,none": 4.049679725575443,
						"perplexity_stderr,none": 0.083249201841488
					},
					"logiqa": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.26881720430107525,
						"acc_norm_stderr,none": 0.01738940946371262,
						"acc_stderr,none": 0.016705867034419633,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.41760432986754026,
						"acc_stderr,none": 0.09366544229867867,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.04318275491977976,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.5245283018867924,
						"acc_stderr,none": 0.030735822206205615,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4097222222222222,
						"acc_stderr,none": 0.04112490974670788,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.04158307533083286,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4127659574468085,
						"acc_stderr,none": 0.03218471141400351,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.042270544512322,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.03878352372138622,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.291005291005291,
						"acc_stderr,none": 0.02339382650048487,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04216370213557835,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.5096774193548387,
						"acc_stderr,none": 0.02843867799890955,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.33004926108374383,
						"acc_stderr,none": 0.03308530426228258,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5818181818181818,
						"acc_stderr,none": 0.03851716319398393,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.47474747474747475,
						"acc_stderr,none": 0.035578062450873145,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5233160621761658,
						"acc_stderr,none": 0.03604513672442202,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3923076923076923,
						"acc_stderr,none": 0.02475600038213095,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959316,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.031124619309328177,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.036030385453603854,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5577981651376147,
						"acc_stderr,none": 0.0212936132075202,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.029157522184605603,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5588235294117647,
						"acc_stderr,none": 0.034849415144292316,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6540084388185654,
						"acc_stderr,none": 0.03096481058878671,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.47533632286995514,
						"acc_stderr,none": 0.033516951676526276,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4732824427480916,
						"acc_stderr,none": 0.04379024936553894,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.40403825717322,
						"acc_stderr,none": 0.09977027072687186,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.48760330578512395,
						"acc_stderr,none": 0.04562951548180765,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.04803752235190193,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3987730061349693,
						"acc_stderr,none": 0.038470214204560246,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915206,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.49514563106796117,
						"acc_stderr,none": 0.049505043821289195,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5854700854700855,
						"acc_stderr,none": 0.03227396567623779,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5887611749680716,
						"acc_stderr,none": 0.017595971908056573,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.4277456647398844,
						"acc_stderr,none": 0.02663653974111608,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961447,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4215686274509804,
						"acc_stderr,none": 0.02827549015679143,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.47119407788863854,
						"acc_stderr,none": 0.07659522798332954,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5112540192926045,
						"acc_stderr,none": 0.028390897396863533,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4845679012345679,
						"acc_stderr,none": 0.02780749004427621,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.30141843971631205,
						"acc_stderr,none": 0.027374128882631153,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3546284224250326,
						"acc_stderr,none": 0.01221857643909017,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.030134614954403924,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.02003639376835263,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04789131426105757,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3877551020408163,
						"acc_stderr,none": 0.03119223072679566,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.46441338966525836,
						"acc_stderr,none": 0.07277213943118718,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6368159203980099,
						"acc_stderr,none": 0.034005985055990146,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3393593403108151,
						"acc_stderr,none": 0.08406650611477154,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.03726214354322415,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5730994152046783,
						"acc_stderr,none": 0.03793620616529917,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3695364238410596,
						"acc_stderr,none": 0.004872317708738307,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36310008136696503,
						"acc_stderr,none": 0.004850091386663449,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8425925925925926,
						"f1_stderr,none": 0.015383433403512555
					},
					"openbookqa": {
						"acc,none": 0.318,
						"acc_norm,none": 0.44,
						"acc_norm_stderr,none": 0.022221331534143057,
						"acc_stderr,none": 0.02084757162081401,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.010842308463902533,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.367,
						"acc_stderr,none": 0.010780241500235484,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3635,
						"acc_stderr,none": 0.010758333787020003,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.011158752568250671,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5365,
						"acc_stderr,none": 0.011153298751334336,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.011157250652425768,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.01115725065242577,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.44421428571428573,
						"acc_stderr,none": 0.05735734719146903,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7840043525571273,
						"acc_norm,none": 0.79379760609358,
						"acc_norm_stderr,none": 0.009439460331609502,
						"acc_stderr,none": 0.009601236303553553,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7566790434622362,
						"acc_norm,none": 0.624648872760461,
						"acc_norm_stderr,none": 0.009371239164298838,
						"acc_stderr,none": 0.14297577266995612,
						"alias": "pythia",
						"bits_per_byte,none": 0.623440267803835,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5405444052770116,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2888187363017827,
						"perplexity_stderr,none": 0.0630081192601602,
						"word_perplexity,none": 10.082741516254622,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49679663188724144,
						"acc_stderr,none": 0.006765271702920654,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.7643334157803611,
						"acc_stderr,none": 0.002110784650445594,
						"alias": " - qqp",
						"f1,none": 0.7412135368569721,
						"f1_stderr,none": 0.0025655303539491798
					},
					"record": {
						"alias": "record",
						"em,none": 0.2724,
						"em_stderr,none": 0.004452168705318793,
						"f1,none": 0.2821585716784,
						"f1_stderr,none": 0.004463016671491972
					},
					"rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.934,
						"acc_norm,none": 0.891,
						"acc_norm_stderr,none": 0.00985982840703718,
						"acc_stderr,none": 0.007855297938697593,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.8337155963302753,
						"acc_stderr,none": 0.012616115146293391,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.34821501412609085,
						"acc_stderr,none": 0.0015963359550631878,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.41982864137086906,
						"bleu_acc_stderr,none": 0.017277030301775766,
						"bleu_diff,none": -0.29463271377209205,
						"bleu_diff_stderr,none": 0.988349254929596,
						"bleu_max,none": 31.39369421270807,
						"bleu_max_stderr,none": 0.8290875013034128,
						"rouge1_acc,none": 0.390452876376989,
						"rouge1_acc_stderr,none": 0.017078230743431445,
						"rouge1_diff,none": 0.3256498356161678,
						"rouge1_diff_stderr,none": 1.178912669156009,
						"rouge1_max,none": 57.3629241771768,
						"rouge1_max_stderr,none": 0.864246963557567,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.016750862381375905,
						"rouge2_diff,none": -0.7611996177378746,
						"rouge2_diff_stderr,none": 1.3710202955405815,
						"rouge2_max,none": 42.46270508573572,
						"rouge2_max_stderr,none": 1.0577136192667103,
						"rougeL_acc,none": 0.38555691554467564,
						"rougeL_acc_stderr,none": 0.017038839010591677,
						"rougeL_diff,none": 0.31681892702599923,
						"rougeL_diff_stderr,none": 1.1938175647429725,
						"rougeL_max,none": 54.57872608854838,
						"rougeL_max_stderr,none": 0.889186741565874
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.41982864137086906,
						"bleu_acc_stderr,none": 0.017277030301775766,
						"bleu_diff,none": -0.29463271377209205,
						"bleu_diff_stderr,none": 0.988349254929596,
						"bleu_max,none": 31.39369421270807,
						"bleu_max_stderr,none": 0.8290875013034128,
						"rouge1_acc,none": 0.390452876376989,
						"rouge1_acc_stderr,none": 0.017078230743431445,
						"rouge1_diff,none": 0.3256498356161678,
						"rouge1_diff_stderr,none": 1.178912669156009,
						"rouge1_max,none": 57.3629241771768,
						"rouge1_max_stderr,none": 0.864246963557567,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.016750862381375905,
						"rouge2_diff,none": -0.7611996177378746,
						"rouge2_diff_stderr,none": 1.3710202955405815,
						"rouge2_max,none": 42.46270508573572,
						"rouge2_max_stderr,none": 1.0577136192667103,
						"rougeL_acc,none": 0.38555691554467564,
						"rougeL_acc_stderr,none": 0.017038839010591677,
						"rougeL_diff,none": 0.31681892702599923,
						"rougeL_diff_stderr,none": 1.1938175647429725,
						"rougeL_max,none": 54.57872608854838,
						"rougeL_max_stderr,none": 0.889186741565874
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2741738066095471,
						"acc_stderr,none": 0.01561651849721937,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.42225622164263465,
						"acc_stderr,none": 0.014364651712015638,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.623440267803835,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5405444052770116,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.082741516254622,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6827150749802684,
						"acc_stderr,none": 0.013080598411332115,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.41346153846153844,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.0671219201247907,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.02175082059125084,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.020099950647503233,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.019966103540279466,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.021323728632807498,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.020271503835075217,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177528,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4419812583668005,
						"acc_stderr,none": 0.05072266385982506,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4714859437751004,
						"acc_stderr,none": 0.010005762674605288,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4947791164658635,
						"acc_stderr,none": 0.010021526496530347,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39959839357429716,
						"acc_stderr,none": 0.009817939267958266,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5401606425702812,
						"acc_stderr,none": 0.009989691810169688,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5072289156626506,
						"acc_stderr,none": 0.010021025361119635,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4991967871485944,
						"acc_stderr,none": 0.010022059935722397,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4393574297188755,
						"acc_stderr,none": 0.00994808700111736,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4815261044176707,
						"acc_stderr,none": 0.010015229768356988,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010008,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42128514056224897,
						"acc_stderr,none": 0.009897099560589198,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4606425702811245,
						"acc_stderr,none": 0.009990976095711894,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.41847389558232934,
						"acc_stderr,none": 0.009887951897505937,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.00984346200738422,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3642570281124498,
						"acc_stderr,none": 0.009645667910246843,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6400336923169484,
						"acc_stderr,none": 0.06170534462201266,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.614824619457313,
						"acc_stderr,none": 0.012523231571141198,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7915287888815354,
						"acc_stderr,none": 0.010453649359718133,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.71409662475182,
						"acc_stderr,none": 0.011627856346940616,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5665122435473197,
						"acc_stderr,none": 0.012752771973917615,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6174718729318333,
						"acc_stderr,none": 0.012506961215828182,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6664460622104567,
						"acc_stderr,none": 0.012133247747835349,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828153,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6975512905360688,
						"acc_stderr,none": 0.011820217487929054,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.557246856386499,
						"acc_stderr,none": 0.012782510750319236,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.6055592322964924,
						"acc_stderr,none": 0.012577106513936133,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.012168840221678032,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8098449089683075,
						"acc_stderr,none": 0.0385965554550919,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8696774193548387,
						"acc_stderr,none": 0.006983463551504547,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7469879518072289,
						"acc_stderr,none": 0.04800875830437278,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7288842544316997,
						"acc_stderr,none": 0.014362296895048145,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.02577120320708472,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.026602896148920776,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8055555555555556,
						"acc_stderr,none": 0.017646619671294873,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "RWKV/v6-Finch-7B-HF"
	},
	"SmerkyG/rwkv-5-world-1b5": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5112739571589628,
						"acc_norm,none": 0.49239007891770004,
						"acc_norm_stderr,none": 0.07714758965234145,
						"acc_stderr,none": 0.10622886770015459,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3446875,
						"acc_stderr,none": 0.016201421596492432,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.15149236838150676,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.24969780694180624,
						"acc_norm,none": 0.24969780694180624,
						"acc_norm_stderr,none": 0.03784722376131588,
						"acc_stderr,none": 0.03784722376131588,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5410165555026203,
						"acc_stderr,none": 0.012289708247379585,
						"alias": "glue",
						"f1,none": 0.3991229231036883,
						"f1_stderr,none": 0.00018823773677900912,
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"lambada": {
						"acc,none": 0.6095478362119154,
						"acc_stderr,none": 0.02462399103409058,
						"alias": "lambada",
						"perplexity,none": 6.369187608169782,
						"perplexity_stderr,none": 0.6794074695255675
					},
					"lambada_multilingual": {
						"acc,none": 0.4484766155637493,
						"acc_stderr,none": 0.0830249431644644,
						"alias": "lambada_multilingual",
						"perplexity,none": 43.18680498264333,
						"perplexity_stderr,none": 16.58118499444968
					},
					"mmlu": {
						"acc,none": 0.2525993448226748,
						"acc_stderr,none": 0.04202282990456397,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24017003188097769,
						"acc_stderr,none": 0.02857393482131495,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25683939491470875,
						"acc_stderr,none": 0.05743915320464653,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26454338641533964,
						"acc_stderr,none": 0.034586953407146494,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.25531240088804313,
						"acc_stderr,none": 0.04558330291190535,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.5192857142857144,
						"acc_stderr,none": 0.029939594331147804,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7164605671706539,
						"acc_norm,none": 0.4995181848102748,
						"acc_norm_stderr,none": 0.008337220905567284,
						"acc_stderr,none": 0.14863316206902988,
						"alias": "pythia",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362,
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"xcopa": {
						"acc,none": 0.5787272727272728,
						"acc_stderr,none": 0.04424725212711732,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4044979919678715,
						"acc_stderr,none": 0.04620022346504284,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5785452138860477,
						"acc_stderr,none": 0.046882211406773226,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.731175545066307,
						"acc_stderr,none": 0.04568831187382474,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5112739571589628,
						"acc_norm,none": 0.49239007891770004,
						"acc_norm_stderr,none": 0.07714758965234145,
						"acc_stderr,none": 0.10622886770015459,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3446875,
						"acc_stderr,none": 0.016201421596492432,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.01516792886540756,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456727,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311012,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.28668941979522183,
						"acc_norm,none": 0.3302047781569966,
						"acc_norm_stderr,none": 0.013743085603760427,
						"acc_stderr,none": 0.013214986329274779,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6220538720538721,
						"acc_norm,none": 0.5723905723905723,
						"acc_norm_stderr,none": 0.010151683397430682,
						"acc_stderr,none": 0.009949405744045459,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.15149236838150676,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662727,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.00223158687484488,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707366,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996695,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262026,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234195,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475294,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557816,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142644,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426109,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165545,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557425,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333454,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910637,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696844,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024398,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707377,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904635,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559552,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767667,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.274,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559926,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.014526080235459548,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937823,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942307,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697589,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.01575792855397917,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665546,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477341,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.015663503610155283,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.01496596071022448,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410037,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259587,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662734,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727191,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734976,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243775,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248123,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695456,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866447,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030127,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.01351231225892086,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.015740004693383845,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475282,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.013772206565168543,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905687,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946097,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724454,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515441,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163044,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745902,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656799,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389627,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.015782683329937628,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.397,
						"acc_stderr,none": 0.015480007449307989,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.24969780694180624,
						"acc_norm,none": 0.24969780694180624,
						"acc_norm_stderr,none": 0.03784722376131588,
						"acc_stderr,none": 0.03784722376131588,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.21893491124260356,
						"acc_norm,none": 0.21893491124260356,
						"acc_norm_stderr,none": 0.03190409884491232,
						"acc_stderr,none": 0.03190409884491232,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.033464098810559534,
						"acc_stderr,none": 0.033464098810559534,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22009569377990432,
						"acc_norm,none": 0.22009569377990432,
						"acc_norm_stderr,none": 0.028727297002576892,
						"acc_stderr,none": 0.028727297002576892,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847837,
						"acc_stderr,none": 0.03915345408847837,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2867647058823529,
						"acc_norm,none": 0.2867647058823529,
						"acc_norm_stderr,none": 0.038923544178637824,
						"acc_stderr,none": 0.038923544178637824,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.04109984842463997,
						"acc_stderr,none": 0.04109984842463997,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24458204334365324,
						"acc_norm,none": 0.24458204334365324,
						"acc_norm_stderr,none": 0.023953997540932172,
						"acc_stderr,none": 0.023953997540932172,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2122905027932961,
						"acc_norm,none": 0.2122905027932961,
						"acc_norm_stderr,none": 0.030650553564393286,
						"acc_stderr,none": 0.030650553564393286,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422647,
						"acc_stderr,none": 0.028146970599422647,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2564102564102564,
						"acc_norm,none": 0.2564102564102564,
						"acc_norm_stderr,none": 0.02647585170669971,
						"acc_stderr,none": 0.02647585170669971,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.28654970760233917,
						"acc_norm,none": 0.28654970760233917,
						"acc_norm_stderr,none": 0.03467826685703826,
						"acc_stderr,none": 0.03467826685703826,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2158273381294964,
						"acc_norm,none": 0.2158273381294964,
						"acc_norm_stderr,none": 0.03502027344986235,
						"acc_stderr,none": 0.03502027344986235,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.03452055811164904,
						"acc_stderr,none": 0.03452055811164904,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02688368747322085,
						"acc_stderr,none": 0.02688368747322085,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23232323232323232,
						"acc_norm,none": 0.23232323232323232,
						"acc_norm_stderr,none": 0.030088629490217483,
						"acc_stderr,none": 0.030088629490217483,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998167,
						"acc_stderr,none": 0.028942004040998167,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.029017133559381268,
						"acc_stderr,none": 0.029017133559381268,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.038532548365520024,
						"acc_stderr,none": 0.038532548365520024,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623559,
						"acc_stderr,none": 0.03706860462623559,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.03484731504650188,
						"acc_stderr,none": 0.03484731504650188,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268948,
						"acc_stderr,none": 0.03279317792268948,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053242,
						"acc_stderr,none": 0.03535681229053242,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.03809523809523811,
						"acc_stderr,none": 0.03809523809523811,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581516,
						"acc_stderr,none": 0.03186439492581516,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.23255813953488372,
						"acc_norm,none": 0.23255813953488372,
						"acc_norm_stderr,none": 0.0323065408320345,
						"acc_stderr,none": 0.0323065408320345,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.029576535293164476,
						"acc_stderr,none": 0.029576535293164476,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.037474208760847595,
						"acc_stderr,none": 0.037474208760847595,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.03914731903595733,
						"acc_stderr,none": 0.03914731903595733,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.02966137041396584,
						"acc_stderr,none": 0.02966137041396584,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736876,
						"acc_stderr,none": 0.03305282343736876,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03196107138009966,
						"acc_stderr,none": 0.03196107138009966,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.039406691683376995,
						"acc_stderr,none": 0.039406691683376995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2896551724137931,
						"acc_norm,none": 0.2896551724137931,
						"acc_norm_stderr,none": 0.03780019230438014,
						"acc_stderr,none": 0.03780019230438014,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.039906571509931855,
						"acc_stderr,none": 0.039906571509931855,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.03183348654463748,
						"acc_stderr,none": 0.03183348654463748,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776726,
						"acc_stderr,none": 0.03011304016776726,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23670212765957446,
						"acc_norm,none": 0.23670212765957446,
						"acc_norm_stderr,none": 0.021949896304751585,
						"acc_stderr,none": 0.021949896304751585,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.02780436020996173,
						"acc_stderr,none": 0.02780436020996173,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.02895216745089081,
						"acc_stderr,none": 0.02895216745089081,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.035477203909303916,
						"acc_stderr,none": 0.035477203909303916,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245473,
						"acc_stderr,none": 0.03244189290245473,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"copa": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5410165555026203,
						"acc_stderr,none": 0.012289708247379585,
						"alias": "glue",
						"f1,none": 0.3991229231036883,
						"f1_stderr,none": 0.00018823773677900912,
						"mcc,none": 0.028777377059353095,
						"mcc_stderr,none": 0.029557452442007595
					},
					"hellaswag": {
						"acc,none": 0.42471619199362676,
						"acc_norm,none": 0.5501892053375822,
						"acc_norm_stderr,none": 0.004964579685712438,
						"acc_stderr,none": 0.004932896472460568,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6095478362119154,
						"acc_stderr,none": 0.02462399103409058,
						"alias": "lambada",
						"perplexity,none": 6.369187608169782,
						"perplexity_stderr,none": 0.6794074695255675
					},
					"lambada_multilingual": {
						"acc,none": 0.4484766155637493,
						"acc_stderr,none": 0.0830249431644644,
						"alias": "lambada_multilingual",
						"perplexity,none": 43.18680498264333,
						"perplexity_stderr,none": 16.58118499444968
					},
					"lambada_openai": {
						"acc,none": 0.6568988938482437,
						"acc_stderr,none": 0.00661412498246103,
						"alias": " - lambada_openai",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.35066951290510384,
						"acc_stderr,none": 0.006648045374603887,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 65.82972989107675,
						"perplexity_stderr,none": 3.9571956126281833
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6567048321366195,
						"acc_stderr,none": 0.00661501790443367,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.056405351554518,
						"perplexity_stderr,none": 0.11860916891457675
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.37104599262565496,
						"acc_stderr,none": 0.006730314981342215,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 61.249035187327245,
						"perplexity_stderr,none": 3.3251943349532094
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.44944692412187076,
						"acc_stderr,none": 0.006930281504471643,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 34.89400012412681,
						"perplexity_stderr,none": 1.8764986780815518
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4145158160294974,
						"acc_stderr,none": 0.006863414211397148,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 48.90485435913133,
						"perplexity_stderr,none": 2.8348284694345787
					},
					"lambada_standard": {
						"acc,none": 0.562196778575587,
						"acc_stderr,none": 0.006911872616149982,
						"alias": " - lambada_standard",
						"perplexity,none": 7.684695399700504,
						"perplexity_stderr,none": 0.20929842195468237
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.01788624973410439,
						"acc_stderr,none": 0.016887410894296944,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.2525993448226748,
						"acc_stderr,none": 0.04202282990456397,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.03633384414073463,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.32075471698113206,
						"acc_stderr,none": 0.028727502957880263,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.03745554791462457,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952344,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3236994219653179,
						"acc_stderr,none": 0.03567603799639171,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322674,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.18723404255319148,
						"acc_stderr,none": 0.025501588341883607,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.023517294335963276,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.040406101782088394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2903225806451613,
						"acc_stderr,none": 0.025822106119415895,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.22167487684729065,
						"acc_stderr,none": 0.029225575892489614,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23030303030303031,
						"acc_stderr,none": 0.03287666758603489,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03358618145732524,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.27461139896373055,
						"acc_stderr,none": 0.032210245080411544,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.258974358974359,
						"acc_stderr,none": 0.022211106810061665,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.02646611753895991,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2605042016806723,
						"acc_stderr,none": 0.028510251512341937,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27155963302752295,
						"acc_stderr,none": 0.019069098363191445,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.029886910547626964,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.031493281045079556,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2489451476793249,
						"acc_stderr,none": 0.028146970599422644,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.16143497757847533,
						"acc_stderr,none": 0.024693957899128472,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24017003188097769,
						"acc_stderr,none": 0.02857393482131495,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.17355371900826447,
						"acc_stderr,none": 0.0345727283691767,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04186091791394607,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.19642857142857142,
						"acc_stderr,none": 0.03770970049347019,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.39805825242718446,
						"acc_stderr,none": 0.04846748253977239,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2094017094017094,
						"acc_stderr,none": 0.026655699653922754,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.22349936143039592,
						"acc_stderr,none": 0.014897235229450707,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.21098265895953758,
						"acc_stderr,none": 0.021966309947043124,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303679,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30718954248366015,
						"acc_stderr,none": 0.026415601914388992,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25683939491470875,
						"acc_stderr,none": 0.05743915320464653,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2540192926045016,
						"acc_stderr,none": 0.02472386150477169,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.023132376234543346,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290396,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24967405475880053,
						"acc_stderr,none": 0.011054538377832327,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.25735294117647056,
						"acc_stderr,none": 0.026556519470041524,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.017322789207784326,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.24545454545454545,
						"acc_stderr,none": 0.041220665028782834,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2612244897959184,
						"acc_stderr,none": 0.028123429335142787,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26454338641533964,
						"acc_stderr,none": 0.034586953407146494,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3034825870646766,
						"acc_stderr,none": 0.03251006816458618,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.25531240088804313,
						"acc_stderr,none": 0.04558330291190535,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.21686746987951808,
						"acc_stderr,none": 0.03208284450356365,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.19883040935672514,
						"acc_stderr,none": 0.03061111655743253,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3502801833927662,
						"acc_stderr,none": 0.004815571260570184,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3463181448331977,
						"acc_stderr,none": 0.004798682211884212,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.37254901960784315,
						"acc_stderr,none": 0.02396538492671658,
						"alias": " - mrpc",
						"f1,none": 0.26011560693641617,
						"f1_stderr,none": 0.03106858780787724
					},
					"openbookqa": {
						"acc,none": 0.254,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.021487751089720522,
						"acc_stderr,none": 0.01948659680164338,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.011177761232603322,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.011139750761283311,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.011158752568250675,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.011130400617630765,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.011110230358066709,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.011174185930778305,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5192857142857144,
						"acc_stderr,none": 0.029939594331147804,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7110990206746464,
						"acc_norm,none": 0.7132752992383025,
						"acc_norm_stderr,none": 0.010551314503108066,
						"acc_stderr,none": 0.010575111841364905,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7164605671706539,
						"acc_norm,none": 0.4995181848102748,
						"acc_norm_stderr,none": 0.008337220905567284,
						"acc_stderr,none": 0.14863316206902988,
						"alias": "pythia",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.055848874703582,
						"perplexity_stderr,none": 0.11854541385297362,
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5052169137836353,
						"acc_stderr,none": 0.006765042284363289,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6368290873114024,
						"acc_stderr,none": 0.002391775841486003,
						"alias": " - qqp",
						"f1,none": 0.4003267306514192,
						"f1_stderr,none": 0.003952746364902292
					},
					"record": {
						"alias": "record",
						"em,none": 0.254,
						"em_stderr,none": 0.004353193658626019,
						"f1,none": 0.26163523828089236,
						"f1_stderr,none": 0.004364439540718011
					},
					"rte": {
						"acc,none": 0.51985559566787,
						"acc_stderr,none": 0.030072723167317184,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.897,
						"acc_norm,none": 0.853,
						"acc_norm_stderr,none": 0.011203415395160333,
						"acc_stderr,none": 0.009616833339695794,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7568807339449541,
						"acc_stderr,none": 0.01453497656207427,
						"alias": " - sst2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.719096605535433,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.646150916185073,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.373441237489386,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5911602209944752,
						"acc_stderr,none": 0.01381695429513568,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.6057692307692307,
						"acc_stderr,none": 0.04815154775990711,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.5787272727272728,
						"acc_stderr,none": 0.04424725212711732,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.02216263442665284,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317688,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582404,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143036,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.02181430098478764,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.0213237286328075,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4044979919678715,
						"acc_stderr,none": 0.04620022346504284,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42610441767068274,
						"acc_stderr,none": 0.009912016377459067,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894263,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.37349397590361444,
						"acc_stderr,none": 0.00969598596221976,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5108433734939759,
						"acc_stderr,none": 0.010019715824483473,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4566265060240964,
						"acc_stderr,none": 0.009984293410840315,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.457429718875502,
						"acc_stderr,none": 0.009985682220227464,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3682730923694779,
						"acc_stderr,none": 0.009668013178998446,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4493975903614458,
						"acc_stderr,none": 0.009970615649588139,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337356,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38473895582329315,
						"acc_stderr,none": 0.00975214930715253,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.009811284026425582,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206098,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.009924844537285524,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.009512333319470373,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5785452138860477,
						"acc_stderr,none": 0.046882211406773226,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5373924553275976,
						"acc_stderr,none": 0.012831093347016556,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7200529450694904,
						"acc_stderr,none": 0.011553982180012723,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6293845135671741,
						"acc_stderr,none": 0.012428861084065901,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5334215751158173,
						"acc_stderr,none": 0.01283834793473167,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5407015221707479,
						"acc_stderr,none": 0.012824422739625585,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.614824619457313,
						"acc_stderr,none": 0.012523231571141184,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.012865364020375396,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6207809397749835,
						"acc_stderr,none": 0.012486070771171334,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5115817339510258,
						"acc_stderr,none": 0.012863672949335879,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.012743443034698407,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.01263288721875138,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.731175545066307,
						"acc_stderr,none": 0.04568831187382474,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8094623655913978,
						"acc_stderr,none": 0.008146492341553319,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.050066428050419214,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6068821689259646,
						"acc_stderr,none": 0.015780865040470965,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6577946768060836,
						"acc_stderr,none": 0.029311491114275143,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6507936507936508,
						"acc_stderr,none": 0.026902825537698707,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6984126984126984,
						"acc_stderr,none": 0.02046343784622378,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv-5-world-1b5"
	},
	"SmerkyG/rwkv-5-world-3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5715896279594137,
						"acc_norm,none": 0.547914317925592,
						"acc_norm_stderr,none": 0.08710699872372187,
						"acc_stderr,none": 0.1112932594915945,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34375,
						"acc_stderr,none": 0.01498089438146567,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8391194029850746,
						"acc_stderr,none": 0.13667479341569325,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.25582800897945085,
						"acc_norm,none": 0.25582800897945085,
						"acc_norm_stderr,none": 0.040895157369724836,
						"acc_stderr,none": 0.040895157369724836,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.4627203430204859,
						"acc_stderr,none": 0.0005380462637720734,
						"alias": "glue",
						"f1,none": 0.568946811739787,
						"f1_stderr,none": 2.2312995793854187e-05,
						"mcc,none": 0.033287101248266296,
						"mcc_stderr,none": 0.030629996977615485
					},
					"lambada": {
						"acc,none": 0.6602949738016689,
						"acc_stderr,none": 0.014793125679769061,
						"alias": "lambada",
						"perplexity,none": 4.8106501773257575,
						"perplexity_stderr,none": 0.34536046768734835
					},
					"lambada_multilingual": {
						"acc,none": 0.4905880069862216,
						"acc_stderr,none": 0.07894533197381409,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.76460753488348,
						"perplexity_stderr,none": 11.777858796703137
					},
					"mmlu": {
						"acc,none": 0.24711579547073068,
						"acc_stderr,none": 0.03715355989276938,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25313496280552605,
						"acc_stderr,none": 0.03497821947589105,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.24460894753781784,
						"acc_stderr,none": 0.03601804484049794,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03335213570316462,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2496035521725341,
						"acc_stderr,none": 0.04361356583373247,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.5161428571428571,
						"acc_stderr,none": 0.023704221585676477,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7245880867684114,
						"acc_norm,none": 0.553548500434726,
						"acc_norm_stderr,none": 0.009738872292814053,
						"acc_stderr,none": 0.13585758218800814,
						"alias": "pythia",
						"bits_per_byte,none": 0.6806661601277508,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.602879709250417,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.155535158061581,
						"perplexity_stderr,none": 0.08982982248111576,
						"word_perplexity,none": 12.465153062612956,
						"word_perplexity_stderr,none": "N/A"
					},
					"xcopa": {
						"acc,none": 0.5901818181818181,
						"acc_stderr,none": 0.05974768968704812,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4234805890227577,
						"acc_stderr,none": 0.04766744487719166,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5985199446483365,
						"acc_stderr,none": 0.05803138987799236,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7698359181838615,
						"acc_stderr,none": 0.04105837672395794,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5715896279594137,
						"acc_norm,none": 0.547914317925592,
						"acc_norm_stderr,none": 0.08710699872372187,
						"acc_stderr,none": 0.1112932594915945,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.34375,
						"acc_stderr,none": 0.01498089438146567,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.346,
						"acc_stderr,none": 0.015050266127564436,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316407,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3358333333333333,
						"acc_stderr,none": 0.013639261190932879,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3361774744027304,
						"acc_norm,none": 0.3643344709897611,
						"acc_norm_stderr,none": 0.014063260279882413,
						"acc_stderr,none": 0.013804855026205761,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6877104377104377,
						"acc_norm,none": 0.6384680134680135,
						"acc_norm_stderr,none": 0.00985850654316206,
						"acc_stderr,none": 0.009509325983631453,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8391194029850746,
						"acc_stderr,none": 0.13667479341569325,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103282,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403638,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.00244335219932984,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816318,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247109,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377939,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535243,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767667,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319422,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286409,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919297,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584931,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178327,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696856,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.01347358666196722,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103773,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632168,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357798,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298185,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.01574315237958553,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996681,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707382,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234195,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122361,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.00751375115747492,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244075,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315155,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240643,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122583,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.011743632866916164,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301322,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797584,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719113,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.0096168333396958,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783224,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438681,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698462,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469417,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866444,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.01145015747079947,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.015794475789511476,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452374,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410043,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.01288666233227453,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.015811198373114878,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357807,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333316,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722694,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904605,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847164,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.01349300044693759,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727191,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919306,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318241,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.01571976816340209,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.014987482264363935,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.25582800897945085,
						"acc_norm,none": 0.25582800897945085,
						"acc_norm_stderr,none": 0.040895157369724836,
						"acc_stderr,none": 0.040895157369724836,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.035386684903133896,
						"acc_stderr,none": 0.035386684903133896,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.03401506715249039,
						"acc_stderr,none": 0.03401506715249039,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22488038277511962,
						"acc_norm,none": 0.22488038277511962,
						"acc_norm_stderr,none": 0.02894866114032704,
						"acc_stderr,none": 0.02894866114032704,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22137404580152673,
						"acc_norm,none": 0.22137404580152673,
						"acc_norm_stderr,none": 0.03641297081313732,
						"acc_stderr,none": 0.03641297081313732,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22058823529411764,
						"acc_norm,none": 0.22058823529411764,
						"acc_norm_stderr,none": 0.03568681318274766,
						"acc_stderr,none": 0.03568681318274766,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.22429906542056074,
						"acc_norm,none": 0.22429906542056074,
						"acc_norm_stderr,none": 0.04051426427955261,
						"acc_stderr,none": 0.04051426427955261,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.024254090252458067,
						"acc_stderr,none": 0.024254090252458067,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.03166009679399812,
						"acc_stderr,none": 0.03166009679399812,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.03251588837184109,
						"acc_stderr,none": 0.03251588837184109,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.29535864978902954,
						"acc_norm,none": 0.29535864978902954,
						"acc_norm_stderr,none": 0.029696338713422896,
						"acc_stderr,none": 0.029696338713422896,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004222,
						"acc_stderr,none": 0.04742907046004222,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199628,
						"acc_stderr,none": 0.04439263906199628,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.0413311944024384,
						"acc_stderr,none": 0.0413311944024384,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153370994,
						"acc_stderr,none": 0.040842473153370994,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.22344322344322345,
						"acc_norm,none": 0.22344322344322345,
						"acc_norm_stderr,none": 0.025257231735255525,
						"acc_stderr,none": 0.025257231735255525,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067566,
						"acc_stderr,none": 0.035589261576067566,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.03329493246449381,
						"acc_stderr,none": 0.03329493246449381,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.03451628876250622,
						"acc_stderr,none": 0.03451628876250622,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.02747460833869742,
						"acc_stderr,none": 0.02747460833869742,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.02962022787479048,
						"acc_stderr,none": 0.02962022787479048,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2689075630252101,
						"acc_norm,none": 0.2689075630252101,
						"acc_norm_stderr,none": 0.028801392193631276,
						"acc_stderr,none": 0.028801392193631276,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.029754528538233224,
						"acc_stderr,none": 0.029754528538233224,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614867,
						"acc_stderr,none": 0.03712537833614867,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23863636363636365,
						"acc_norm,none": 0.23863636363636365,
						"acc_norm_stderr,none": 0.03222147017899509,
						"acc_stderr,none": 0.03222147017899509,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.03551344041697431,
						"acc_stderr,none": 0.03551344041697431,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.035717915564682706,
						"acc_stderr,none": 0.035717915564682706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.04013964554072773,
						"acc_stderr,none": 0.04013964554072773,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.22377622377622378,
						"acc_norm,none": 0.22377622377622378,
						"acc_norm_stderr,none": 0.034974882883823395,
						"acc_stderr,none": 0.034974882883823395,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03970158273235173,
						"acc_stderr,none": 0.03970158273235173,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.033366051897610625,
						"acc_stderr,none": 0.033366051897610625,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2725060827250608,
						"acc_norm,none": 0.2725060827250608,
						"acc_norm_stderr,none": 0.02198927219610503,
						"acc_stderr,none": 0.02198927219610503,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2757009345794392,
						"acc_norm,none": 0.2757009345794392,
						"acc_norm_stderr,none": 0.030618808026055617,
						"acc_stderr,none": 0.030618808026055617,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.22764227642276422,
						"acc_norm,none": 0.22764227642276422,
						"acc_norm_stderr,none": 0.037962586241752624,
						"acc_stderr,none": 0.037962586241752624,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03058876451607487,
						"acc_stderr,none": 0.03058876451607487,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.03425177889602085,
						"acc_stderr,none": 0.03425177889602085,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2328042328042328,
						"acc_norm,none": 0.2328042328042328,
						"acc_norm_stderr,none": 0.030822624150702194,
						"acc_stderr,none": 0.030822624150702194,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495736,
						"acc_stderr,none": 0.040832215386495736,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2827586206896552,
						"acc_norm,none": 0.2827586206896552,
						"acc_norm_stderr,none": 0.03752833958003337,
						"acc_stderr,none": 0.03752833958003337,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604901,
						"acc_stderr,none": 0.04176466758604901,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.03288889734209821,
						"acc_stderr,none": 0.03288889734209821,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.030971033440870904,
						"acc_stderr,none": 0.030971033440870904,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281315,
						"acc_stderr,none": 0.022199827758281315,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2543103448275862,
						"acc_norm,none": 0.2543103448275862,
						"acc_norm_stderr,none": 0.028652009240399654,
						"acc_stderr,none": 0.028652009240399654,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2814814814814815,
						"acc_norm,none": 0.2814814814814815,
						"acc_norm_stderr,none": 0.03885004245800255,
						"acc_stderr,none": 0.03885004245800255,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.029118495998237293,
						"acc_stderr,none": 0.029118495998237293,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0316293039569795,
						"acc_stderr,none": 0.0316293039569795,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.0341614906832298,
						"acc_stderr,none": 0.0341614906832298,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.033287101248266296,
						"mcc_stderr,none": 0.030629996977615485
					},
					"copa": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.4627203430204859,
						"acc_stderr,none": 0.0005380462637720734,
						"alias": "glue",
						"f1,none": 0.568946811739787,
						"f1_stderr,none": 2.2312995793854187e-05,
						"mcc,none": 0.033287101248266296,
						"mcc_stderr,none": 0.030629996977615485
					},
					"hellaswag": {
						"acc,none": 0.4705238000398327,
						"acc_norm,none": 0.625771758613822,
						"acc_norm_stderr,none": 0.004829339926388333,
						"acc_stderr,none": 0.004981103157940447,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.6602949738016689,
						"acc_stderr,none": 0.014793125679769061,
						"alias": "lambada",
						"perplexity,none": 4.8106501773257575,
						"perplexity_stderr,none": 0.34536046768734835
					},
					"lambada_multilingual": {
						"acc,none": 0.4905880069862216,
						"acc_stderr,none": 0.07894533197381409,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.76460753488348,
						"perplexity_stderr,none": 11.777858796703137
					},
					"lambada_openai": {
						"acc,none": 0.6875606442848826,
						"acc_stderr,none": 0.006457292279746485,
						"alias": " - lambada_openai",
						"perplexity,none": 4.155535158061581,
						"perplexity_stderr,none": 0.08982982248111576
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.39705026198331067,
						"acc_stderr,none": 0.006816718684122085,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 47.82707465657873,
						"perplexity_stderr,none": 2.7879027564358196
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6867843974383854,
						"acc_stderr,none": 0.0064616581301303365,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.156753927993921,
						"perplexity_stderr,none": 0.0897699250588797
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.41024645837376283,
						"acc_stderr,none": 0.006852827058720168,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 43.223792503416625,
						"perplexity_stderr,none": 2.2537890369461473
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.49446924121870756,
						"acc_stderr,none": 0.006965551475495918,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 24.66077080145808,
						"perplexity_stderr,none": 1.2767533778483229
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4643896759169416,
						"acc_stderr,none": 0.006948288151296134,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 33.954645784970026,
						"perplexity_stderr,none": 1.9014170337516603
					},
					"lambada_standard": {
						"acc,none": 0.6338055501649524,
						"acc_stderr,none": 0.006711907623691292,
						"alias": " - lambada_standard",
						"perplexity,none": 5.464492858596434,
						"perplexity_stderr,none": 0.1291799823213989
					},
					"logiqa": {
						"acc,none": 0.22119815668202766,
						"acc_norm,none": 0.27342549923195086,
						"acc_norm_stderr,none": 0.01748247454768128,
						"acc_stderr,none": 0.016279743532401664,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.24711579547073068,
						"acc_stderr,none": 0.03715355989276938,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.03999262876617722,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.24342105263157895,
						"acc_stderr,none": 0.034923496688842384,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2188679245283019,
						"acc_stderr,none": 0.025447863825108618,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.043898699568087785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.027678452578212383,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518753,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.038061426873099935,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.022182037202948368,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.18253968253968253,
						"acc_stderr,none": 0.034550710191021475,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2129032258064516,
						"acc_stderr,none": 0.02328766512726853,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110175,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.03453131801885415,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.25252525252525254,
						"acc_stderr,none": 0.030954055470365907,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.22797927461139897,
						"acc_stderr,none": 0.03027690994517826,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2230769230769231,
						"acc_stderr,none": 0.021107730127243988,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959302,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.19747899159663865,
						"acc_stderr,none": 0.025859164122051456,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23669724770642203,
						"acc_stderr,none": 0.01822407811729907,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.028353212866863448,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2742616033755274,
						"acc_stderr,none": 0.029041333510598028,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.21973094170403587,
						"acc_stderr,none": 0.027790177064383595,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22137404580152673,
						"acc_stderr,none": 0.03641297081313729,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25313496280552605,
						"acc_stderr,none": 0.03497821947589105,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.04320767807536669,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.03957835471980979,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.27607361963190186,
						"acc_stderr,none": 0.03512385283705051,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.20388349514563106,
						"acc_stderr,none": 0.0398913985953177,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.24786324786324787,
						"acc_stderr,none": 0.028286324075564393,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2720306513409962,
						"acc_stderr,none": 0.015913367447500517,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.024685316867257803,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.21787709497206703,
						"acc_stderr,none": 0.013806211780732977,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2679738562091503,
						"acc_stderr,none": 0.02536060379624256,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.24460894753781784,
						"acc_stderr,none": 0.03601804484049794,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.26366559485530544,
						"acc_stderr,none": 0.02502553850053234,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2808641975308642,
						"acc_stderr,none": 0.025006469755799197,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24902216427640156,
						"acc_stderr,none": 0.01104489226404077,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.16911764705882354,
						"acc_stderr,none": 0.022770868010113028,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.01824902441120766,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04265792110940589,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19591836734693877,
						"acc_stderr,none": 0.025409301953225678,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2378940526486838,
						"acc_stderr,none": 0.03335213570316462,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.24875621890547264,
						"acc_stderr,none": 0.030567675938916714,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2496035521725341,
						"acc_stderr,none": 0.04361356583373247,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.23493975903614459,
						"acc_stderr,none": 0.03300533186128922,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03377310252209196,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4192562404482934,
						"acc_stderr,none": 0.004980913696566601,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4330756712774613,
						"acc_stderr,none": 0.0049974170342329035,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5294117647058824,
						"acc_stderr,none": 0.02474116366703947,
						"alias": " - mrpc",
						"f1,none": 0.5384615384615384,
						"f1_stderr,none": 0.02953592477057466
					},
					"openbookqa": {
						"acc,none": 0.262,
						"acc_norm,none": 0.366,
						"acc_norm_stderr,none": 0.021564276850201614,
						"acc_stderr,none": 0.019684688820194723,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.011180899170152967,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5005,
						"acc_stderr,none": 0.011183130429495192,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4765,
						"acc_stderr,none": 0.011170777418517833,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5205,
						"acc_stderr,none": 0.011173732641806813,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.011155703691943108,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5365,
						"acc_stderr,none": 0.011153298751334334,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5161428571428571,
						"acc_stderr,none": 0.023704221585676477,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7421109902067464,
						"acc_norm,none": 0.7372143634385201,
						"acc_norm_stderr,none": 0.010269354068140765,
						"acc_stderr,none": 0.01020695666205628,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7245880867684114,
						"acc_norm,none": 0.553548500434726,
						"acc_norm_stderr,none": 0.009738872292814053,
						"acc_stderr,none": 0.13585758218800814,
						"alias": "pythia",
						"bits_per_byte,none": 0.6806661601277508,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.602879709250417,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.155535158061581,
						"perplexity_stderr,none": 0.08982982248111576,
						"word_perplexity,none": 12.465153062612956,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.5041186161449753,
						"acc_stderr,none": 0.006765181024578747,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.4660153351471679,
						"acc_stderr,none": 0.0024809499153539104,
						"alias": " - qqp",
						"f1,none": 0.569210815125212,
						"f1_stderr,none": 0.0026439637870231624
					},
					"rte": {
						"acc,none": 0.6245487364620939,
						"acc_stderr,none": 0.02914777518082041,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.924,
						"acc_norm,none": 0.883,
						"acc_norm_stderr,none": 0.010169287802713329,
						"acc_stderr,none": 0.008384169266796386,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7878440366972477,
						"acc_stderr,none": 0.013852835283565899,
						"alias": " - sst2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6806661601277508,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.602879709250417,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.465153062612956,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6219415943172849,
						"acc_stderr,none": 0.013628165460523242,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.059755502635482904,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.5901818181818181,
						"acc_stderr,none": 0.05974768968704812,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.020740596536488073,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.021175665695209407,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.022374298166353185,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143022,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689257,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.02129495127723464,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.021175665695209407,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4234805890227577,
						"acc_stderr,none": 0.04766744487719166,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337342,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.43132530120481927,
						"acc_stderr,none": 0.009927090290379251,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.47630522088353416,
						"acc_stderr,none": 0.010010812905412062,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40562248995983935,
						"acc_stderr,none": 0.009841918156163167,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5196787148594377,
						"acc_stderr,none": 0.010014307727112695,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.01001552415662981,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4791164658634538,
						"acc_stderr,none": 0.010013327358568523,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39036144578313253,
						"acc_stderr,none": 0.009778161879954578,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4811244979919679,
						"acc_stderr,none": 0.010014928901071309,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.009663601903728026,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.40481927710843374,
						"acc_stderr,none": 0.009838809968433943,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.42690763052208835,
						"acc_stderr,none": 0.009914408828583412,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3566265060240964,
						"acc_stderr,none": 0.009601209437867972,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43333333333333335,
						"acc_stderr,none": 0.009932588282324238,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3618473895582329,
						"acc_stderr,none": 0.00963191294489075,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5985199446483365,
						"acc_stderr,none": 0.05803138987799236,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.01277447516071634,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7445400397088021,
						"acc_stderr,none": 0.011223207064267599,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6585043017868961,
						"acc_stderr,none": 0.01220347324121444,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481957,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352964,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6307081403044341,
						"acc_stderr,none": 0.012419685881273594,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163338,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6432825943084051,
						"acc_stderr,none": 0.01232748767711036,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5268034414295168,
						"acc_stderr,none": 0.012848623899505768,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823767,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6234281932495036,
						"acc_stderr,none": 0.01246891448965935,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7698359181838615,
						"acc_stderr,none": 0.04105837672395794,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8387096774193549,
						"acc_stderr,none": 0.007629426973745115,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.051219942106581456,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6684045881126173,
						"acc_stderr,none": 0.015210420238218126,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7224334600760456,
						"acc_stderr,none": 0.027665074010286835,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.026786851659200937,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.753968253968254,
						"acc_stderr,none": 0.019203841459246623,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv-5-world-3b"
	},
	"SmerkyG/rwkv-5-world-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.10866313811862532,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961653,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8389402985074627,
						"acc_stderr,none": 0.14221250961386078,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.30357451217406306,
						"acc_norm,none": 0.30357451217406306,
						"acc_norm_stderr,none": 0.05580815942501653,
						"acc_stderr,none": 0.05580815942501653,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.5762863268222963,
						"acc_stderr,none": 0.014009880606053625,
						"alias": "glue",
						"f1,none": 0.6883018461843459,
						"f1_stderr,none": 0.0001975515575281144,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012926811166858133
					},
					"lambada": {
						"acc,none": 0.7140500679215991,
						"acc_stderr,none": 0.01580861275109021,
						"alias": "lambada",
						"perplexity,none": 3.8043457790444495,
						"perplexity_stderr,none": 0.22807736216039784
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"mmlu": {
						"acc,none": 0.3320039880358923,
						"acc_stderr,none": 0.06082532488334742,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35359116022099446,
						"acc_stderr,none": 0.04823516181321706,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.05825152613536486,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.45671428571428574,
						"acc_stderr,none": 0.05326292303210276,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7436450386424761,
						"acc_norm,none": 0.6277400828170847,
						"acc_norm_stderr,none": 0.010306063670327702,
						"acc_stderr,none": 0.1382052888201035,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3741475128993352,
						"perplexity_stderr,none": 0.06615459908451708,
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.0711251373879857,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4404551539491299,
						"acc_stderr,none": 0.05079333798871789,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.05999108613609885,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8040008990784446,
						"acc_stderr,none": 0.049644304458458105,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6237316798196166,
						"acc_norm_stderr,none": 0.09140588016411445,
						"acc_stderr,none": 0.10866313811862532,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3590625,
						"acc_stderr,none": 0.017704453505961653,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797579,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055235,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35333333333333333,
						"acc_stderr,none": 0.01380457216231493,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39505119453924914,
						"acc_norm,none": 0.4308873720136519,
						"acc_norm_stderr,none": 0.014471133392642482,
						"acc_stderr,none": 0.01428589829293818,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7382154882154882,
						"acc_norm,none": 0.7188552188552189,
						"acc_norm_stderr,none": 0.009224735470287002,
						"acc_stderr,none": 0.009020523527210177,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8389402985074627,
						"acc_stderr,none": 0.14221250961386078,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074789,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503004,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000143,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890127,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651506,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661764,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042958,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256581,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337078,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.00597215762238962,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452375,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280302,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611462,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096926,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.0132941993266136,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179486,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270417,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454269,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319422,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.015658997547870243,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.01101091459599244,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.01281855355784399,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087966,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341676,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792947,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240636,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024944,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.01527525231651936,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936694,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042762,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357807,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662728,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695803,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179474,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140913,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796406,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366646,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.015786868759359012,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695459,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491108,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256567,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568198,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.475,
						"acc_stderr,none": 0.01579951342999602,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160326,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426095,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.01524937846417175,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855738,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938579,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235261,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286413,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333344,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306513,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.00556839357508137,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498318,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407557,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.30357451217406306,
						"acc_norm,none": 0.30357451217406306,
						"acc_norm_stderr,none": 0.05580815942501653,
						"acc_stderr,none": 0.05580815942501653,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620372,
						"acc_stderr,none": 0.03794062549620372,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3444976076555024,
						"acc_norm,none": 0.3444976076555024,
						"acc_norm_stderr,none": 0.03294948099678349,
						"acc_stderr,none": 0.03294948099678349,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245472,
						"acc_stderr,none": 0.03244189290245472,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.366412213740458,
						"acc_norm,none": 0.366412213740458,
						"acc_norm_stderr,none": 0.042258754519696386,
						"acc_stderr,none": 0.042258754519696386,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.34365325077399383,
						"acc_norm,none": 0.34365325077399383,
						"acc_norm_stderr,none": 0.02646664923557932,
						"acc_stderr,none": 0.02646664923557932,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.329608938547486,
						"acc_norm,none": 0.329608938547486,
						"acc_norm_stderr,none": 0.03523332230992218,
						"acc_stderr,none": 0.03523332230992218,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.28270042194092826,
						"acc_norm,none": 0.28270042194092826,
						"acc_norm_stderr,none": 0.02931281415395592,
						"acc_stderr,none": 0.02931281415395592,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.042324735320550415,
						"acc_stderr,none": 0.042324735320550415,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.026223115500506114,
						"acc_stderr,none": 0.026223115500506114,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3382352941176471,
						"acc_norm,none": 0.3382352941176471,
						"acc_norm_stderr,none": 0.03320574612945431,
						"acc_stderr,none": 0.03320574612945431,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.30994152046783624,
						"acc_norm,none": 0.30994152046783624,
						"acc_norm_stderr,none": 0.035469769593931624,
						"acc_stderr,none": 0.035469769593931624,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675177,
						"acc_stderr,none": 0.03711513959675177,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.03908914479291562,
						"acc_stderr,none": 0.03908914479291562,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.34591194968553457,
						"acc_norm,none": 0.34591194968553457,
						"acc_norm_stderr,none": 0.037841848841408295,
						"acc_stderr,none": 0.037841848841408295,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.31901840490797545,
						"acc_norm,none": 0.31901840490797545,
						"acc_norm_stderr,none": 0.03661997551073836,
						"acc_stderr,none": 0.03661997551073836,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.40336134453781514,
						"acc_norm,none": 0.40336134453781514,
						"acc_norm_stderr,none": 0.031866081214088314,
						"acc_stderr,none": 0.031866081214088314,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.22608695652173913,
						"acc_norm,none": 0.22608695652173913,
						"acc_norm_stderr,none": 0.02764178570724133,
						"acc_stderr,none": 0.02764178570724133,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03448901746724545,
						"acc_stderr,none": 0.03448901746724545,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.33557046979865773,
						"acc_norm,none": 0.33557046979865773,
						"acc_norm_stderr,none": 0.03881373830315734,
						"acc_stderr,none": 0.03881373830315734,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.03661433360410718,
						"acc_stderr,none": 0.03661433360410718,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3220338983050847,
						"acc_norm,none": 0.3220338983050847,
						"acc_norm_stderr,none": 0.04319782230261344,
						"acc_stderr,none": 0.04319782230261344,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.040693063197213754,
						"acc_stderr,none": 0.040693063197213754,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336698,
						"acc_stderr,none": 0.03333068663336698,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.36627906976744184,
						"acc_norm,none": 0.36627906976744184,
						"acc_norm_stderr,none": 0.036843172681015855,
						"acc_stderr,none": 0.036843172681015855,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2798053527980535,
						"acc_norm,none": 0.2798053527980535,
						"acc_norm_stderr,none": 0.02216976172592782,
						"acc_stderr,none": 0.02216976172592782,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3878504672897196,
						"acc_norm,none": 0.3878504672897196,
						"acc_norm_stderr,none": 0.03338651735918192,
						"acc_stderr,none": 0.03338651735918192,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837383,
						"acc_stderr,none": 0.04239540943837383,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.32857142857142857,
						"acc_norm,none": 0.32857142857142857,
						"acc_norm_stderr,none": 0.03248939796876841,
						"acc_stderr,none": 0.03248939796876841,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732731,
						"acc_stderr,none": 0.03460236918732731,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.296551724137931,
						"acc_norm,none": 0.296551724137931,
						"acc_norm_stderr,none": 0.038061426873099935,
						"acc_stderr,none": 0.038061426873099935,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.37142857142857144,
						"acc_norm,none": 0.37142857142857144,
						"acc_norm_stderr,none": 0.04738035414793429,
						"acc_stderr,none": 0.04738035414793429,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.0340385177358705,
						"acc_stderr,none": 0.0340385177358705,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293145,
						"acc_stderr,none": 0.030641194076293145,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.031924831026639656,
						"acc_stderr,none": 0.031924831026639656,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3390804597701149,
						"acc_norm,none": 0.3390804597701149,
						"acc_norm_stderr,none": 0.03599172203897236,
						"acc_stderr,none": 0.03599172203897236,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2756756756756757,
						"acc_norm,none": 0.2756756756756757,
						"acc_norm_stderr,none": 0.03294252220324153,
						"acc_stderr,none": 0.03294252220324153,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456225,
						"acc_stderr,none": 0.03579526516456225,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.32298136645962733,
						"acc_norm,none": 0.32298136645962733,
						"acc_norm_stderr,none": 0.03696826370174651,
						"acc_stderr,none": 0.03696826370174651,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012926811166858133
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.033799766898963086,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.5762863268222963,
						"acc_stderr,none": 0.014009880606053625,
						"alias": "glue",
						"f1,none": 0.6883018461843459,
						"f1_stderr,none": 0.0001975515575281144,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.012926811166858133
					},
					"hellaswag": {
						"acc,none": 0.5263891655048795,
						"acc_norm,none": 0.7085241983668592,
						"acc_norm_stderr,none": 0.004535133886462043,
						"acc_stderr,none": 0.004982826916687145,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7140500679215991,
						"acc_stderr,none": 0.01580861275109021,
						"alias": "lambada",
						"perplexity,none": 3.8043457790444495,
						"perplexity_stderr,none": 0.22807736216039784
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08485396843250168,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.000058641883392,
						"perplexity_stderr,none": 8.215000706142517
					},
					"lambada_openai": {
						"acc,none": 0.7420919852513099,
						"acc_stderr,none": 0.0060949951256529635,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3741475128993352,
						"perplexity_stderr,none": 0.06615459908451708
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42751795070832527,
						"acc_stderr,none": 0.0068923954478686475,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40995706565969,
						"perplexity_stderr,none": 1.9198882405259308
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284605,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.376276908213185,
						"perplexity_stderr,none": 0.06624295795502655
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4486706772753736,
						"acc_stderr,none": 0.006929173919665489,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.08178023365845,
						"perplexity_stderr,none": 1.438828440779044
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.477136806072853,
						"perplexity_stderr,none": 0.8029953639024064
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5196972637298661,
						"acc_stderr,none": 0.006960570207731863,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.65514219581279,
						"perplexity_stderr,none": 1.1521232467165174
					},
					"lambada_standard": {
						"acc,none": 0.6850378420337667,
						"acc_stderr,none": 0.006471404446305815,
						"alias": " - lambada_standard",
						"perplexity,none": 4.2322364195412305,
						"perplexity_stderr,none": 0.08998782296210209
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.01769054268019077,
						"acc_stderr,none": 0.016887410894296958,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.3320039880358923,
						"acc_stderr,none": 0.06082532488334742,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.040943762699967946,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.28289473684210525,
						"acc_stderr,none": 0.03665349695640767,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.36981132075471695,
						"acc_stderr,none": 0.029711421880107926,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3194444444444444,
						"acc_stderr,none": 0.03899073687357335,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117317,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.03295304696818318,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237654,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.03148955829745529,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481404,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.296551724137931,
						"acc_stderr,none": 0.03806142687309993,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643898,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.041349130183033156,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38387096774193546,
						"acc_stderr,none": 0.02766618207553963,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2955665024630542,
						"acc_stderr,none": 0.03210494433751458,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4727272727272727,
						"acc_stderr,none": 0.0389853160557942,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.37373737373737376,
						"acc_stderr,none": 0.03446897738659333,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.46113989637305697,
						"acc_stderr,none": 0.03597524411734578,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3153846153846154,
						"acc_stderr,none": 0.023559646983189932,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31092436974789917,
						"acc_stderr,none": 0.030066761582977924,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.03479185572599661,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3724770642201835,
						"acc_stderr,none": 0.0207283684576385,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.025967420958258533,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46568627450980393,
						"acc_stderr,none": 0.03501038327635897,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4388185654008439,
						"acc_stderr,none": 0.032302649315470375,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008732,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3256110520722636,
						"acc_stderr,none": 0.059599616018790984,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.34710743801652894,
						"acc_stderr,none": 0.04345724570292534,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04557239513497751,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.34355828220858897,
						"acc_stderr,none": 0.03731133519673892,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.042878587513404565,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.44660194174757284,
						"acc_stderr,none": 0.04922424153458933,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.405982905982906,
						"acc_stderr,none": 0.03217180182641086,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.44316730523627074,
						"acc_stderr,none": 0.017764085035348407,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.024946792225272314,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574898,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3431372549019608,
						"acc_stderr,none": 0.02718449890994161,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.37013196009011906,
						"acc_stderr,none": 0.05585574688367252,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3890675241157556,
						"acc_stderr,none": 0.027690337536485376,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36728395061728397,
						"acc_stderr,none": 0.026822801759507894,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880592,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2966101694915254,
						"acc_stderr,none": 0.011665946586082844,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35661764705882354,
						"acc_stderr,none": 0.029097209568411945,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.33169934640522875,
						"acc_stderr,none": 0.01904748523936038,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670238,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3183673469387755,
						"acc_stderr,none": 0.029822533793982066,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35359116022099446,
						"acc_stderr,none": 0.04823516181321706,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.4228855721393035,
						"acc_stderr,none": 0.034932317774212816,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.05825152613536486,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.38596491228070173,
						"acc_stderr,none": 0.03733756969066164,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3804381049414162,
						"acc_stderr,none": 0.004900736223664135,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3677786818551668,
						"acc_stderr,none": 0.004863276879922489,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7426470588235294,
						"acc_stderr,none": 0.021669984270659748,
						"alias": " - mrpc",
						"f1,none": 0.8372093023255814,
						"f1_stderr,none": 0.01565537525040386
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.020553269174209184,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.399,
						"acc_stderr,none": 0.010952601505572451,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.010883323176386975,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3725,
						"acc_stderr,none": 0.010813433320184794,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5395,
						"acc_stderr,none": 0.011148184426533283,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5205,
						"acc_stderr,none": 0.011173732641806813,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.011177408788874896,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4965,
						"acc_stderr,none": 0.011182862030875934,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.45671428571428574,
						"acc_stderr,none": 0.05326292303210276,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.00977985076784724,
						"acc_stderr,none": 0.009812682950815195,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7436450386424761,
						"acc_norm,none": 0.6277400828170847,
						"acc_norm_stderr,none": 0.010306063670327702,
						"acc_stderr,none": 0.1382052888201035,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3741475128993352,
						"perplexity_stderr,none": 0.06615459908451708,
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4964305326743548,
						"acc_stderr,none": 0.006765238152075669,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6765767994063814,
						"acc_stderr,none": 0.002326470962150933,
						"alias": " - qqp",
						"f1,none": 0.6870123031260472,
						"f1_stderr,none": 0.0026013624809404605
					},
					"record": {
						"alias": "record",
						"em,none": 0.2523,
						"em_stderr,none": 0.004343542061010362,
						"f1,none": 0.26155857166051866,
						"f1_stderr,none": 0.004358518434111173
					},
					"rte": {
						"acc,none": 0.5956678700361011,
						"acc_stderr,none": 0.029540420517619723,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.00807249435832349,
						"acc_stderr,none": 0.006558812241406122,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9105504587155964,
						"acc_stderr,none": 0.009670122820901149,
						"alias": " - sst2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6740331491712708,
						"acc_stderr,none": 0.013173782636922185,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04926646390821466,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6218181818181818,
						"acc_stderr,none": 0.0711251373879857,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577997,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.02001121929807353,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01992048320956607,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407053,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.02035437548053008,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4404551539491299,
						"acc_stderr,none": 0.05079333798871789,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.010010427753210668,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4843373493975904,
						"acc_stderr,none": 0.010017154458106754,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3927710843373494,
						"acc_stderr,none": 0.009788891787583067,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5441767068273092,
						"acc_stderr,none": 0.009982878443738404,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5008032128514056,
						"acc_stderr,none": 0.010022059935722385,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4907630522088353,
						"acc_stderr,none": 0.010020362530631355,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43815261044176707,
						"acc_stderr,none": 0.009945106474553727,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.491566265060241,
						"acc_stderr,none": 0.01002064706811417,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3863453815261044,
						"acc_stderr,none": 0.009759721337538354,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.009895812914052199,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169673,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.00985407806781078,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41004016064257026,
						"acc_stderr,none": 0.009858525713807862,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.009663601903728034,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6329944046687925,
						"acc_stderr,none": 0.05999108613609885,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.012609238175551166,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7796161482461945,
						"acc_stderr,none": 0.010666988429058735,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7088021178027796,
						"acc_stderr,none": 0.011691443511878192,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352968,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133262,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6624751819986764,
						"acc_stderr,none": 0.012168840221678027,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828155,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.011884972073313783,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5592322964923891,
						"acc_stderr,none": 0.012776518586332792,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907707,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6479152878888154,
						"acc_stderr,none": 0.01229119826167458,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8040008990784446,
						"acc_stderr,none": 0.049644304458458105,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8602150537634409,
						"acc_stderr,none": 0.007193092732936881,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7108433734939759,
						"acc_stderr,none": 0.05006642805041921,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7559958289885297,
						"acc_stderr,none": 0.013876360379829233,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7604562737642585,
						"acc_stderr,none": 0.026368102510190856,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6571428571428571,
						"acc_stderr,none": 0.026786851659200923,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7658730158730159,
						"acc_stderr,none": 0.018880788485078293,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv-5-world-7b"
	},
	"SmerkyG/rwkv5-world-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"lambada": {
						"acc,none": 0.7141470987774112,
						"acc_stderr,none": 0.015852858101128752,
						"alias": "lambada",
						"perplexity,none": 3.8043801729431963,
						"perplexity_stderr,none": 0.22812798039678603
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08137850543669961,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.999595835155514,
						"perplexity_stderr,none": 8.072823751794548
					}
				},
				"results": {
					"lambada": {
						"acc,none": 0.7141470987774112,
						"acc_stderr,none": 0.015852858101128752,
						"alias": "lambada",
						"perplexity,none": 3.8043801729431963,
						"perplexity_stderr,none": 0.22812798039678603
					},
					"lambada_multilingual": {
						"acc,none": 0.5373568794876771,
						"acc_stderr,none": 0.08137850543669961,
						"alias": "lambada_multilingual",
						"perplexity,none": 20.999595835155514,
						"perplexity_stderr,none": 8.072823751794548
					},
					"lambada_openai": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284611,
						"alias": " - lambada_openai",
						"perplexity,none": 3.376546033217004,
						"perplexity_stderr,none": 0.066245771836926
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42751795070832527,
						"acc_stderr,none": 0.0068923954478686475,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40702593465138,
						"perplexity_stderr,none": 1.9141725017987234
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7432563555210557,
						"acc_stderr,none": 0.006085990070284611,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3765489927589742,
						"perplexity_stderr,none": 0.06624526125580474
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4486706772753736,
						"acc_stderr,none": 0.006929173919665481,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.081604214276275,
						"perplexity_stderr,none": 1.4321984974847726
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5476421502037648,
						"acc_stderr,none": 0.006934283157219039,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.47741160051334,
						"perplexity_stderr,none": 0.8057218543858691
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5196972637298661,
						"acc_stderr,none": 0.006960570207731857,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.655388433577585,
						"perplexity_stderr,none": 1.1506575126728078
					},
					"lambada_standard": {
						"acc,none": 0.6850378420337667,
						"acc_stderr,none": 0.006471404446305822,
						"alias": " - lambada_standard",
						"perplexity,none": 4.232214312669388,
						"perplexity_stderr,none": 0.09037455534867263
					},
					"piqa": {
						"acc,none": 0.7725788900979326,
						"acc_norm,none": 0.7725788900979326,
						"acc_norm_stderr,none": 0.009779850767847237,
						"acc_stderr,none": 0.00977985076784726,
						"alias": "piqa"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv5-world-7b"
	},
	"SmerkyG/rwkv6-world-1b6": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5183201803833145,
						"acc_norm,none": 0.48759864712514095,
						"acc_norm_stderr,none": 0.07452537303817926,
						"acc_stderr,none": 0.10434087305690941,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.018724990659359127,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0039,
						"acc_stderr,none": 0.006217146565049461,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8373432835820896,
						"acc_stderr,none": 0.1468785341680392,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.22511144130757801,
						"acc_norm,none": 0.22511144130757801,
						"acc_norm_stderr,none": 0.10298594831022281,
						"acc_stderr,none": 0.10298594831022281,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25392850975651887,
						"acc_norm,none": 0.25392850975651887,
						"acc_norm_stderr,none": 0.03743978360991105,
						"acc_stderr,none": 0.03743978360991105,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.318230098389982,
						"likelihood_diff_stderr,none": 0.48046133772614846,
						"pct_stereotype,none": 0.5608228980322003,
						"pct_stereotype_stderr,none": 0.07204406992669224
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.000492125984251961
					},
					"glue": {
						"acc,none": 0.49803477846593613,
						"acc_stderr,none": 0.00945506109642132,
						"alias": "glue",
						"f1,none": 0.5456061768821845,
						"f1_stderr,none": 0.0003102272617225067,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"kmmlu": {
						"acc,none": 0.1410337857349119,
						"acc_norm,none": 0.1410337857349119,
						"acc_norm_stderr,none": 0.049604637102365116,
						"acc_stderr,none": 0.049604637102365116,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4981363736022802,
						"acc_norm,none": 0.486,
						"acc_norm_stderr,none": 0.000500609218436876,
						"acc_stderr,none": 0.04529957920734483,
						"alias": "kobest",
						"f1,none": 0.390690776277561,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6366194449835048,
						"acc_stderr,none": 0.017127266793369178,
						"alias": "lambada",
						"perplexity,none": 5.492392204136331,
						"perplexity_stderr,none": 0.43498172839354293
					},
					"lambada_cloze": {
						"acc,none": 0.04880652047351058,
						"acc_stderr,none": 0.01461867816261798,
						"alias": "lambada_cloze",
						"perplexity,none": 460.8615387915018,
						"perplexity_stderr,none": 17.71592946389816
					},
					"lambada_multilingual": {
						"acc,none": 0.468969532311275,
						"acc_stderr,none": 0.08021244747250739,
						"alias": "lambada_multilingual",
						"perplexity,none": 37.52833751758278,
						"perplexity_stderr,none": 14.412301174518149
					},
					"mmlu": {
						"acc,none": 0.23821392963965246,
						"acc_stderr,none": 0.03691756481961924,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2450584484590861,
						"acc_stderr,none": 0.027684867991237257,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25716124879304797,
						"acc_stderr,none": 0.03879280037592875,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22261943451413715,
						"acc_stderr,none": 0.031892950211247356,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.22454804947668885,
						"acc_stderr,none": 0.04516695818369077,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2772178850248403,
						"acc_norm,none": 0.2525002745510116,
						"acc_norm_stderr,none": 8.430321161813488e-05,
						"acc_stderr,none": 0.08190805224080097,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5092857142857142,
						"acc_stderr,none": 0.026347480893196357,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7177074337037033,
						"acc_norm,none": 0.4950699171244749,
						"acc_norm_stderr,none": 0.008233280981145237,
						"acc_stderr,none": 0.14555233221174574,
						"alias": "pythia",
						"bits_per_byte,none": 0.7098604202965154,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6356458619040524,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.665591769126226,
						"perplexity_stderr,none": 0.10689576694153269,
						"word_perplexity,none": 13.889700231774906,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.34397163120567376,
						"acc_norm,none": 0.3953900709219858,
						"acc_norm_stderr,none": 0.06452384655268414,
						"acc_stderr,none": 0.04603006083513485,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5380187015407141,
						"acc_stderr,none": 0.026637104525336262,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2803044957760258,
						"acc_stderr,none": 0.0014974998548866016,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2692778457772338,
						"bleu_acc_stderr,none": 0.01552856663708729,
						"bleu_diff,none": -9.061930693874382,
						"bleu_diff_stderr,none": 0.7719429195586806,
						"bleu_max,none": 25.08065863163638,
						"bleu_max_stderr,none": 0.759005892708332,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253597,
						"rouge1_diff,none": -11.20350138154079,
						"rouge1_diff_stderr,none": 0.7955557578523819,
						"rouge1_max,none": 50.08099781706327,
						"rouge1_max_stderr,none": 0.8385516974706783,
						"rouge2_acc,none": 0.21909424724602203,
						"rouge2_acc_stderr,none": 0.014480038578757449,
						"rouge2_diff,none": -13.55573800474474,
						"rouge2_diff_stderr,none": 0.971632769701086,
						"rouge2_max,none": 33.712150727593624,
						"rouge2_max_stderr,none": 0.9684697654611896,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.01527417621928335,
						"rougeL_diff,none": -11.592760333260497,
						"rougeL_diff_stderr,none": 0.8092485426227116,
						"rougeL_max,none": 47.29944986882853,
						"rougeL_max_stderr,none": 0.8472323940201789
					},
					"xcopa": {
						"acc,none": 0.5796363636363636,
						"acc_stderr,none": 0.052055661886820155,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.41416331994645245,
						"acc_stderr,none": 0.0478208968063563,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.578846038144516,
						"acc_stderr,none": 0.053471571298976166,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7491571139581928,
						"acc_stderr,none": 0.039580427285498064,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5183201803833145,
						"acc_norm,none": 0.48759864712514095,
						"acc_norm_stderr,none": 0.07452537303817926,
						"acc_stderr,none": 0.10434087305690941,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.018724990659359127,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.318,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.321,
						"acc_stderr,none": 0.014770821817934638,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36083333333333334,
						"acc_stderr,none": 0.013869180252444867,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2977815699658703,
						"acc_norm,none": 0.3310580204778157,
						"acc_norm_stderr,none": 0.013752062419817829,
						"acc_stderr,none": 0.01336308010724448,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.627104377104377,
						"acc_norm,none": 0.5648148148148148,
						"acc_norm_stderr,none": 0.010173216430370913,
						"acc_stderr,none": 0.009922743197129248,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0039,
						"acc_stderr,none": 0.006217146565049461,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.02,
						"acc_stderr,none": 0.0031312780858980625,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0145,
						"acc_stderr,none": 0.002673658397142748,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000143,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315666,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0008676789587852494,
						"acc_stderr,none": 0.0006134085141343904,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8373432835820896,
						"acc_stderr,none": 0.1468785341680392,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823333,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346938,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264376,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525037,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085336,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732965,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341674,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078157,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099194,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452375,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584939,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487907,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946567975,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378218,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.01231079020841279,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992436,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151125,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799464,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426526,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.015549205052920675,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942314,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.0114199130650987,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.01426700906103131,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352803,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280309,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785138,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140924,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474918,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.0157161699532041,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727188,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.01576069159013638,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.659,
						"acc_stderr,none": 0.014998131348402706,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445512,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704168,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719095,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248128,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621228,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410057,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426566,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235253,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085337,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.015814743314581818,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756974,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523738,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029993,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091505,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.451,
						"acc_stderr,none": 0.015743152379585536,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504415,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525078,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509007,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996681,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653892,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438667,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341674,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697079,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333366,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319418,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698453,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.01581309754773099,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.368,
						"acc_stderr,none": 0.015258073561521803,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6238532110091743,
						"acc_stderr,none": 0.008472516562330718,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.39285714285714285,
						"acc_stderr,none": 0.0658538889806635,
						"alias": "cb",
						"f1,none": 0.22956521739130434,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.22511144130757801,
						"acc_norm,none": 0.22511144130757801,
						"acc_norm_stderr,none": 0.10298594831022281,
						"acc_stderr,none": 0.10298594831022281,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3939393939393939,
						"acc_norm,none": 0.3939393939393939,
						"acc_norm_stderr,none": 0.08637692614387409,
						"acc_stderr,none": 0.08637692614387409,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.0713860923457608,
						"acc_stderr,none": 0.0713860923457608,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031762,
						"acc_stderr,none": 0.07633651333031762,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915692,
						"acc_stderr,none": 0.08742975048915692,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120631,
						"acc_stderr,none": 0.06206900541120631,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613885,
						"acc_stderr,none": 0.06390760676613885,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.06545849153992007,
						"acc_stderr,none": 0.06545849153992007,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25392850975651887,
						"acc_norm,none": 0.25392850975651887,
						"acc_norm_stderr,none": 0.03743978360991105,
						"acc_stderr,none": 0.03743978360991105,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.0346215784586514,
						"acc_stderr,none": 0.0346215784586514,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.03401506715249039,
						"acc_stderr,none": 0.03401506715249039,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.030166316298847997,
						"acc_stderr,none": 0.030166316298847997,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22900763358778625,
						"acc_norm,none": 0.22900763358778625,
						"acc_norm_stderr,none": 0.036853466317118506,
						"acc_stderr,none": 0.036853466317118506,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22058823529411764,
						"acc_norm,none": 0.22058823529411764,
						"acc_norm_stderr,none": 0.03568681318274768,
						"acc_stderr,none": 0.03568681318274768,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.22910216718266255,
						"acc_norm,none": 0.22910216718266255,
						"acc_norm_stderr,none": 0.023419902096457838,
						"acc_stderr,none": 0.023419902096457838,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083291,
						"acc_stderr,none": 0.03132179803083291,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.03251588837184109,
						"acc_stderr,none": 0.03251588837184109,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800373,
						"acc_stderr,none": 0.04142972007800373,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199628,
						"acc_stderr,none": 0.04439263906199628,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.04133119440243839,
						"acc_stderr,none": 0.04133119440243839,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845101,
						"acc_stderr,none": 0.04117581097845101,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.025825054502221036,
						"acc_stderr,none": 0.025825054502221036,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832924,
						"acc_stderr,none": 0.031321798030832924,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.035917280137616484,
						"acc_stderr,none": 0.035917280137616484,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.03729198658164233,
						"acc_stderr,none": 0.03729198658164233,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.034520558111649044,
						"acc_stderr,none": 0.034520558111649044,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.034089978868575295,
						"acc_stderr,none": 0.034089978868575295,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.02814574111568384,
						"acc_stderr,none": 0.02814574111568384,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.03095405547036592,
						"acc_stderr,none": 0.03095405547036592,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.28991596638655465,
						"acc_norm,none": 0.28991596638655465,
						"acc_norm_stderr,none": 0.029472485833136084,
						"acc_stderr,none": 0.029472485833136084,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.02975452853823326,
						"acc_stderr,none": 0.02975452853823326,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.036333844140734636,
						"acc_stderr,none": 0.036333844140734636,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081378,
						"acc_stderr,none": 0.03607993033081378,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23863636363636365,
						"acc_norm,none": 0.23863636363636365,
						"acc_norm_stderr,none": 0.03222147017899509,
						"acc_stderr,none": 0.03222147017899509,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019636,
						"acc_stderr,none": 0.033341501981019636,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.038206998148497956,
						"acc_stderr,none": 0.038206998148497956,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714262,
						"acc_stderr,none": 0.04025566684714262,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653377,
						"acc_stderr,none": 0.033346454086653377,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.036751374389002375,
						"acc_stderr,none": 0.036751374389002375,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928724,
						"acc_stderr,none": 0.039325376803928724,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.23255813953488372,
						"acc_norm,none": 0.23255813953488372,
						"acc_norm_stderr,none": 0.032306540832034505,
						"acc_stderr,none": 0.032306540832034505,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.02938702375433312,
						"acc_stderr,none": 0.02938702375433312,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.029661370413965826,
						"acc_stderr,none": 0.029661370413965826,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.033052823437368754,
						"acc_stderr,none": 0.033052823437368754,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.039406691683377,
						"acc_stderr,none": 0.039406691683377,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845101,
						"acc_stderr,none": 0.04117581097845101,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.0328888973420982,
						"acc_stderr,none": 0.0328888973420982,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.02973775172659684,
						"acc_stderr,none": 0.02973775172659684,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26595744680851063,
						"acc_norm,none": 0.26595744680851063,
						"acc_norm_stderr,none": 0.022816607010135298,
						"acc_stderr,none": 0.022816607010135298,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23706896551724138,
						"acc_norm,none": 0.23706896551724138,
						"acc_norm_stderr,none": 0.027981694008624977,
						"acc_stderr,none": 0.027981694008624977,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543968,
						"acc_stderr,none": 0.03279424038543968,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2610619469026549,
						"acc_norm,none": 0.2610619469026549,
						"acc_norm_stderr,none": 0.02928090821163171,
						"acc_stderr,none": 0.02928090821163171,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.033464098810559534,
						"acc_stderr,none": 0.033464098810559534,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697947,
						"acc_stderr,none": 0.03162930395697947,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.0352316839773709,
						"acc_stderr,none": 0.0352316839773709,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.30625,
						"acc_norm_stderr,none": 0.0365545115043377,
						"acc_stderr,none": 0.0365545115043377,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.04163331998932261,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.318230098389982,
						"likelihood_diff_stderr,none": 0.48046133772614846,
						"pct_stereotype,none": 0.5608228980322003,
						"pct_stereotype_stderr,none": 0.07204406992669224
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4473017292784736,
						"likelihood_diff_stderr,none": 0.08695488009601655,
						"pct_stereotype,none": 0.5933214072748957,
						"pct_stereotype_stderr,none": 0.011998685164249638
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.864010989010989,
						"likelihood_diff_stderr,none": 0.41516050281788774,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.090909090909091,
						"likelihood_diff_stderr,none": 1.8989449006059103,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.934615384615385,
						"likelihood_diff_stderr,none": 0.6205920744974063,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.056852867304209534
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.444921875,
						"likelihood_diff_stderr,none": 0.16578603835010605,
						"pct_stereotype,none": 0.578125,
						"pct_stereotype_stderr,none": 0.027650782660529012
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.0590277777777777,
						"likelihood_diff_stderr,none": 0.23109494805803146,
						"pct_stereotype,none": 0.5370370370370371,
						"pct_stereotype_stderr,none": 0.03400603625538272
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7743055555555554,
						"likelihood_diff_stderr,none": 0.3497868413273469,
						"pct_stereotype,none": 0.6944444444444444,
						"pct_stereotype_stderr,none": 0.05466818705978919
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3481791338582676,
						"likelihood_diff_stderr,none": 0.14898932473442675,
						"pct_stereotype,none": 0.5098425196850394,
						"pct_stereotype_stderr,none": 0.02220147678894261
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5236486486486487,
						"likelihood_diff_stderr,none": 0.3578579743920463,
						"pct_stereotype,none": 0.7207207207207207,
						"pct_stereotype_stderr,none": 0.0427766252488144
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.438172043010753,
						"likelihood_diff_stderr,none": 0.4280738521720646,
						"pct_stereotype,none": 0.8172043010752689,
						"pct_stereotype_stderr,none": 0.04029530010615517
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.9763157894736842,
						"likelihood_diff_stderr,none": 0.24154022106449718,
						"pct_stereotype,none": 0.5894736842105263,
						"pct_stereotype_stderr,none": 0.03578259307784409
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1895125223613596,
						"likelihood_diff_stderr,none": 0.07484714558124682,
						"pct_stereotype,none": 0.5277280858676208,
						"pct_stereotype_stderr,none": 0.012194504446502623
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 2.9930555555555554,
						"likelihood_diff_stderr,none": 0.29468317795738513,
						"pct_stereotype,none": 0.4888888888888889,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 1.6923076923076923,
						"likelihood_diff_stderr,none": 0.5822933812555691,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.757575757575758,
						"likelihood_diff_stderr,none": 0.4268784654567139,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.05615974350262316
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8489096573208723,
						"likelihood_diff_stderr,none": 0.14500890805366037,
						"pct_stereotype,none": 0.49221183800623053,
						"pct_stereotype_stderr,none": 0.027947458769356347
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.450592885375494,
						"likelihood_diff_stderr,none": 0.18831740541618544,
						"pct_stereotype,none": 0.383399209486166,
						"pct_stereotype_stderr,none": 0.030628616122857773
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.0972222222222223,
						"likelihood_diff_stderr,none": 0.4329719160126238,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8796195652173915,
						"likelihood_diff_stderr,none": 0.1476952978468876,
						"pct_stereotype,none": 0.4369565217391304,
						"pct_stereotype_stderr,none": 0.023151745316873383
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.409782608695652,
						"likelihood_diff_stderr,none": 0.265448272977312,
						"pct_stereotype,none": 0.7043478260869566,
						"pct_stereotype_stderr,none": 0.04273972288221526
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3626373626373627,
						"likelihood_diff_stderr,none": 0.3180148603574629,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898534
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6256377551020407,
						"likelihood_diff_stderr,none": 0.23597412114986113,
						"pct_stereotype,none": 0.6836734693877551,
						"pct_stereotype_stderr,none": 0.03330234893102004
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.000492125984251961
					},
					"glue": {
						"acc,none": 0.49803477846593613,
						"acc_stderr,none": 0.00945506109642132,
						"alias": "glue",
						"f1,none": 0.5456061768821845,
						"f1_stderr,none": 0.0003102272617225067,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.4371639115714001,
						"acc_norm,none": 0.5730930093606851,
						"acc_norm_stderr,none": 0.004936176784631952,
						"acc_stderr,none": 0.004950221546187573,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.1410337857349119,
						"acc_norm,none": 0.1410337857349119,
						"acc_norm_stderr,none": 0.049604637102365116,
						"acc_stderr,none": 0.049604637102365116,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847165,
						"acc_stderr,none": 0.009779910359847165,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031811,
						"acc_stderr,none": 0.012997843819031811,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274559,
						"acc_stderr,none": 0.012886662332274559,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18166666666666667,
						"acc_norm,none": 0.18166666666666667,
						"acc_norm_stderr,none": 0.015753945309122375,
						"acc_stderr,none": 0.015753945309122375,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.063,
						"acc_norm,none": 0.063,
						"acc_norm_stderr,none": 0.007687007876286429,
						"acc_stderr,none": 0.007687007876286429,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.09,
						"acc_norm,none": 0.09,
						"acc_norm_stderr,none": 0.009054390204866435,
						"acc_stderr,none": 0.009054390204866435,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.007395315455792944,
						"acc_stderr,none": 0.007395315455792944,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696829,
						"acc_stderr,none": 0.010016552866696829,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.03939825345266469,
						"acc_stderr,none": 0.03939825345266469,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.065,
						"acc_norm,none": 0.065,
						"acc_norm_stderr,none": 0.007799733061832003,
						"acc_stderr,none": 0.007799733061832003,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.095,
						"acc_norm,none": 0.095,
						"acc_norm_stderr,none": 0.009276910103103338,
						"acc_stderr,none": 0.009276910103103338,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.012155153135511952,
						"acc_stderr,none": 0.012155153135511952,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.053,
						"acc_norm,none": 0.053,
						"acc_norm_stderr,none": 0.0070881056172464405,
						"acc_stderr,none": 0.0070881056172464405,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.172,
						"acc_norm,none": 0.172,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.011939788882495321,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.012233587399477823,
						"acc_stderr,none": 0.012233587399477823,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.117,
						"acc_norm,none": 0.117,
						"acc_norm_stderr,none": 0.010169287802713329,
						"acc_stderr,none": 0.010169287802713329,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.121,
						"acc_norm,none": 0.121,
						"acc_norm_stderr,none": 0.010318210380946094,
						"acc_stderr,none": 0.010318210380946094,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.094,
						"acc_norm,none": 0.094,
						"acc_norm_stderr,none": 0.009233052000787738,
						"acc_stderr,none": 0.009233052000787738,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.009899393819724437,
						"acc_stderr,none": 0.009899393819724437,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.111,
						"acc_norm,none": 0.111,
						"acc_norm_stderr,none": 0.009938701010583726,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.01325317496476393,
						"acc_stderr,none": 0.01325317496476393,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416053,
						"acc_stderr,none": 0.010811655372416053,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.012655439943366667,
						"acc_stderr,none": 0.012655439943366667,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.01618832201360066,
						"acc_stderr,none": 0.01618832201360066,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.145,
						"acc_norm,none": 0.145,
						"acc_norm_stderr,none": 0.011139977517890155,
						"acc_stderr,none": 0.011139977517890155,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.01129723982340931,
						"acc_stderr,none": 0.01129723982340931,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.121,
						"acc_norm,none": 0.121,
						"acc_norm_stderr,none": 0.010318210380946088,
						"acc_stderr,none": 0.010318210380946088,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416051,
						"acc_stderr,none": 0.010811655372416051,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02395648228514077,
						"acc_stderr,none": 0.02395648228514077,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.013314551335935941,
						"acc_stderr,none": 0.013314551335935941,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.009008893392651514,
						"acc_stderr,none": 0.009008893392651514,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.148,
						"acc_norm,none": 0.148,
						"acc_norm_stderr,none": 0.011234866364235261,
						"acc_stderr,none": 0.011234866364235261,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.027525684670556556,
						"acc_stderr,none": 0.027525684670556556,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.161,
						"acc_norm,none": 0.161,
						"acc_norm_stderr,none": 0.011628164696727178,
						"acc_stderr,none": 0.011628164696727178,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.01251081614126436,
						"acc_stderr,none": 0.01251081614126436,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.121,
						"acc_norm,none": 0.121,
						"acc_norm_stderr,none": 0.010318210380946085,
						"acc_stderr,none": 0.010318210380946085,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4981363736022802,
						"acc_norm,none": 0.486,
						"acc_norm_stderr,none": 0.000500609218436876,
						"acc_stderr,none": 0.04529957920734483,
						"alias": "kobest",
						"f1,none": 0.390690776277561,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.015649789644462217,
						"alias": " - kobest_copa",
						"f1,none": 0.5722810043683432,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.358,
						"acc_norm,none": 0.486,
						"acc_norm_stderr,none": 0.022374298166353196,
						"acc_stderr,none": 0.02146143486285912,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.35605071142381,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5037783375314862,
						"acc_stderr,none": 0.025125227983562776,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3753723773850123,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6366194449835048,
						"acc_stderr,none": 0.017127266793369178,
						"alias": "lambada",
						"perplexity,none": 5.492392204136331,
						"perplexity_stderr,none": 0.43498172839354293
					},
					"lambada_cloze": {
						"acc,none": 0.04880652047351058,
						"acc_stderr,none": 0.01461867816261798,
						"alias": "lambada_cloze",
						"perplexity,none": 460.8615387915018,
						"perplexity_stderr,none": 17.71592946389816
					},
					"lambada_multilingual": {
						"acc,none": 0.468969532311275,
						"acc_stderr,none": 0.08021244747250739,
						"alias": "lambada_multilingual",
						"perplexity,none": 37.52833751758278,
						"perplexity_stderr,none": 14.412301174518149
					},
					"lambada_openai": {
						"acc,none": 0.6677663496992043,
						"acc_stderr,none": 0.006562149900578275,
						"alias": " - lambada_openai",
						"perplexity,none": 4.665591769126226,
						"perplexity_stderr,none": 0.10689576694153269
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.02018241800892684,
						"acc_stderr,none": 0.001959166225850381,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 475.4489872642492,
						"perplexity_stderr,none": 16.381540433420465
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3729866097418979,
						"acc_stderr,none": 0.006737473981200102,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 58.1771479324427,
						"perplexity_stderr,none": 3.4162049025670376
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6681544731224529,
						"acc_stderr,none": 0.006560221405202012,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.666015274922298,
						"perplexity_stderr,none": 0.10707061486157689
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3898699786532117,
						"acc_stderr,none": 0.006794901529888725,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 52.21102001247603,
						"perplexity_stderr,none": 2.7846343378698686
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.477779934019018,
						"acc_stderr,none": 0.006959095614775138,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 30.125491115602742,
						"perplexity_stderr,none": 1.5932353250681277
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4360566660197943,
						"acc_stderr,none": 0.006908778538407587,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 42.462013252470115,
						"perplexity_stderr,none": 2.42865626066225
					},
					"lambada_standard": {
						"acc,none": 0.6050844168445566,
						"acc_stderr,none": 0.006810393291223524,
						"alias": " - lambada_standard",
						"perplexity,none": 6.318375076651966,
						"perplexity_stderr,none": 0.16062020196501867
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.07743062293809432,
						"acc_stderr,none": 0.0037236424005269997,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 446.2740903187544,
						"perplexity_stderr,none": 15.905875877620373
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.22073791348600508,
						"exact_match_stderr,get-answer": 0.010463865471633095
					},
					"logiqa": {
						"acc,none": 0.22734254992319508,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.01751297178222521,
						"acc_stderr,none": 0.01643906767511774,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24936386768447838,
						"acc_norm,none": 0.2862595419847328,
						"acc_norm_stderr,none": 0.011404127158026002,
						"acc_stderr,none": 0.01091549419314277,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25896147403685094,
						"acc_norm,none": 0.25326633165829143,
						"acc_norm_stderr,none": 0.00796108364801872,
						"acc_stderr,none": 0.008019338828219902,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3402880745604745,
						"acc_stderr,none": 0.004876312540042981,
						"alias": "mc_taco",
						"f1,none": 0.5056741528450123,
						"f1_stderr,none": 0.005450643778497873
					},
					"medmcqa": {
						"acc,none": 0.25603633755677746,
						"acc_norm,none": 0.25603633755677746,
						"acc_norm_stderr,none": 0.00674892575909593,
						"acc_stderr,none": 0.00674892575909593,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24901806755695208,
						"acc_norm,none": 0.24901806755695208,
						"acc_norm_stderr,none": 0.012125135984037815,
						"acc_stderr,none": 0.012125135984037815,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.23821392963965246,
						"acc_stderr,none": 0.03691756481961924,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322674,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.03673731683969506,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.17763157894736842,
						"acc_stderr,none": 0.03110318238312338,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.24528301886792453,
						"acc_stderr,none": 0.02648035717989569,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03621034121889507,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036844,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.032147373020294696,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2723404255319149,
						"acc_stderr,none": 0.029101290698386705,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518752,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2482758620689655,
						"acc_stderr,none": 0.036001056927277716,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2275132275132275,
						"acc_stderr,none": 0.021591269407823774,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.03970158273235173,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2064516129032258,
						"acc_stderr,none": 0.023025899617188726,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.1724137931034483,
						"acc_stderr,none": 0.02657767218303658,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.28484848484848485,
						"acc_stderr,none": 0.03524390844511784,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.20202020202020202,
						"acc_stderr,none": 0.02860620428922987,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.20725388601036268,
						"acc_stderr,none": 0.029252823291803638,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2076923076923077,
						"acc_stderr,none": 0.0205675395672468,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.21481481481481482,
						"acc_stderr,none": 0.025040443877000673,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2184873949579832,
						"acc_stderr,none": 0.026841514322958948,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2052980132450331,
						"acc_stderr,none": 0.032979866484738336,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.20550458715596331,
						"acc_stderr,none": 0.01732435232501599,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.02541642838876747,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.02933116229425173,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.028609516716994934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.26905829596412556,
						"acc_stderr,none": 0.02976377940687497,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22900763358778625,
						"acc_stderr,none": 0.036853466317118506,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2450584484590861,
						"acc_stderr,none": 0.027684867991237257,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2396694214876033,
						"acc_stderr,none": 0.03896878985070415,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.042365112580946315,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22085889570552147,
						"acc_stderr,none": 0.03259177392742178,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.21359223300970873,
						"acc_stderr,none": 0.040580420156460344,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.02948036054954119,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2388250319284802,
						"acc_stderr,none": 0.015246803197398682,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.02344582627654555,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2435754189944134,
						"acc_stderr,none": 0.014355911964767864,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.0248480182638752,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25716124879304797,
						"acc_stderr,none": 0.03879280037592875,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2282958199356913,
						"acc_stderr,none": 0.023839303311398212,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.21604938271604937,
						"acc_stderr,none": 0.022899162918445806,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2375886524822695,
						"acc_stderr,none": 0.025389512552729896,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2438070404172099,
						"acc_stderr,none": 0.010966507972178479,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.28308823529411764,
						"acc_stderr,none": 0.02736586113151381,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2565359477124183,
						"acc_stderr,none": 0.017667841612378995,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.22727272727272727,
						"acc_stderr,none": 0.040139645540727735,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20408163265306123,
						"acc_stderr,none": 0.02580128347509051,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22261943451413715,
						"acc_stderr,none": 0.031892950211247356,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22388059701492538,
						"acc_stderr,none": 0.02947525023601718,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.22454804947668885,
						"acc_stderr,none": 0.04516695818369077,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542125,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.28313253012048195,
						"acc_stderr,none": 0.03507295431370519,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.03488647713457922,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.32531839021905246,
						"acc_stderr,none": 0.004729124164815638,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3233319772172498,
						"acc_stderr,none": 0.004717515195651371,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.41421568627450983,
						"acc_stderr,none": 0.024416585751307857,
						"alias": "mrpc",
						"f1,none": 0.37922077922077924,
						"f1_stderr,none": 0.03142902706645868
					},
					"multimedqa": {
						"acc,none": 0.2772178850248403,
						"acc_norm,none": 0.2525002745510116,
						"acc_norm_stderr,none": 8.430321161813488e-05,
						"acc_stderr,none": 0.08190805224080097,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6760722364448263,
						"mrr_stderr,none": 0.010262051958579363,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4435665914221219,
						"r@2_stderr,none": 0.016699919496280195
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6361926278397946,
						"mrr_stderr,none": 0.010414029815308106,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4717832957110609,
						"r@2_stderr,none": 0.01678053141516135
					},
					"openbookqa": {
						"acc,none": 0.236,
						"acc_norm,none": 0.344,
						"acc_norm_stderr,none": 0.02126575803797874,
						"acc_stderr,none": 0.019008699622084728,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4805,
						"acc_stderr,none": 0.011174628009718265,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4595,
						"acc_stderr,none": 0.011146389370464357,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078664,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.011180429374603772,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5092857142857142,
						"acc_stderr,none": 0.026347480893196357,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7257889009793254,
						"acc_norm,none": 0.7236126224156693,
						"acc_norm_stderr,none": 0.010434162388275598,
						"acc_stderr,none": 0.010408618664933382,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26622544833475664,
						"acc_norm,none": 0.2796221178479932,
						"acc_norm_stderr,none": 0.003278982484044806,
						"acc_stderr,none": 0.0032290833678478687,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228134,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7177074337037033,
						"acc_norm,none": 0.4950699171244749,
						"acc_norm_stderr,none": 0.008233280981145237,
						"acc_stderr,none": 0.14555233221174574,
						"alias": "pythia",
						"bits_per_byte,none": 0.7098604202965154,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6356458619040524,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.665591769126226,
						"perplexity_stderr,none": 0.10689576694153269,
						"word_perplexity,none": 13.889700231774906,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.34397163120567376,
						"acc_norm,none": 0.3953900709219858,
						"acc_norm_stderr,none": 0.06452384655268414,
						"acc_stderr,none": 0.04603006083513485,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.045777595341980594,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.038518021388670956,
						"acc_stderr,none": 0.03675892481369823,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.33098591549295775,
						"acc_norm,none": 0.3485915492957746,
						"acc_norm_stderr,none": 0.028326433924036706,
						"acc_stderr,none": 0.02797236390054683,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4933186893648179,
						"acc_stderr,none": 0.006764806510150313,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5827355923818947,
						"acc_stderr,none": 0.0024524205315383358,
						"alias": "qqp",
						"f1,none": 0.5472599431055767,
						"f1_stderr,none": 0.0031057733485620053
					},
					"race": {
						"acc,none": 0.3473684210526316,
						"acc_stderr,none": 0.014735977850381395,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5595667870036101,
						"acc_stderr,none": 0.02988212336311873,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.896,
						"acc_norm,none": 0.861,
						"acc_norm_stderr,none": 0.010945263761042963,
						"acc_stderr,none": 0.009658016218524306,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.569954128440367,
						"acc_stderr,none": 0.01677522159623909,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5324402679196241,
						"acc_norm,none": 0.7248825352394281,
						"acc_norm_stderr,none": 0.0031573564958999683,
						"acc_stderr,none": 0.003527643743813799,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5380187015407141,
						"acc_stderr,none": 0.026637104525336262,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5027043269230769,
						"acc_stderr,none": 0.005004182229818367,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6050471267862573,
						"acc_stderr,none": 0.0049214908804866755,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5077450980392156,
						"acc_stderr,none": 0.004950386395909265,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2803044957760258,
						"acc_stderr,none": 0.0014974998548866016,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2692778457772338,
						"bleu_acc_stderr,none": 0.01552856663708729,
						"bleu_diff,none": -9.061930693874382,
						"bleu_diff_stderr,none": 0.7719429195586806,
						"bleu_max,none": 25.08065863163638,
						"bleu_max_stderr,none": 0.759005892708332,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253597,
						"rouge1_diff,none": -11.20350138154079,
						"rouge1_diff_stderr,none": 0.7955557578523819,
						"rouge1_max,none": 50.08099781706327,
						"rouge1_max_stderr,none": 0.8385516974706783,
						"rouge2_acc,none": 0.21909424724602203,
						"rouge2_acc_stderr,none": 0.014480038578757449,
						"rouge2_diff,none": -13.55573800474474,
						"rouge2_diff_stderr,none": 0.971632769701086,
						"rouge2_max,none": 33.712150727593624,
						"rouge2_max_stderr,none": 0.9684697654611896,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.01527417621928335,
						"rougeL_diff,none": -11.592760333260497,
						"rougeL_diff_stderr,none": 0.8092485426227116,
						"rougeL_max,none": 47.29944986882853,
						"rougeL_max_stderr,none": 0.8472323940201789
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2692778457772338,
						"bleu_acc_stderr,none": 0.01552856663708729,
						"bleu_diff,none": -9.061930693874382,
						"bleu_diff_stderr,none": 0.7719429195586806,
						"bleu_max,none": 25.08065863163638,
						"bleu_max_stderr,none": 0.759005892708332,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.015461027627253597,
						"rouge1_diff,none": -11.20350138154079,
						"rouge1_diff_stderr,none": 0.7955557578523819,
						"rouge1_max,none": 50.08099781706327,
						"rouge1_max_stderr,none": 0.8385516974706783,
						"rouge2_acc,none": 0.21909424724602203,
						"rouge2_acc_stderr,none": 0.014480038578757449,
						"rouge2_diff,none": -13.55573800474474,
						"rouge2_diff_stderr,none": 0.971632769701086,
						"rouge2_max,none": 33.712150727593624,
						"rouge2_max_stderr,none": 0.9684697654611896,
						"rougeL_acc,none": 0.2558139534883721,
						"rougeL_acc_stderr,none": 0.01527417621928335,
						"rougeL_diff,none": -11.592760333260497,
						"rougeL_diff_stderr,none": 0.8092485426227116,
						"rougeL_max,none": 47.29944986882853,
						"rougeL_max_stderr,none": 0.8472323940201789
					},
					"truthfulqa_mc1": {
						"acc,none": 0.20807833537331702,
						"acc_stderr,none": 0.014210503473576625,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3525306561787345,
						"acc_stderr,none": 0.01354218932350523,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.000492125984251961
					},
					"wic": {
						"acc,none": 0.5094043887147336,
						"acc_stderr,none": 0.019807216763271497,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7098604202965154,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6356458619040524,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 13.889700231774906,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5943172849250198,
						"acc_stderr,none": 0.013800206336014205,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.3942307692307692,
						"acc_stderr,none": 0.04815154775990712,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7765567765567766,
						"acc_stderr,none": 0.025257231735255514,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5796363636363636,
						"acc_stderr,none": 0.052055661886820155,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.02111349234774373,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.02148775108972052,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.022318338119870527,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928862,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.021461434862859126,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.021461434862859122,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.41416331994645245,
						"acc_stderr,none": 0.0478208968063563,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.00947697684977859,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994481,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.46907630522088356,
						"acc_stderr,none": 0.010002886789051677,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.37991967871485943,
						"acc_stderr,none": 0.009728758452987872,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.519277108433735,
						"acc_stderr,none": 0.010014621554188653,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.01001042775321067,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409708,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3887550200803213,
						"acc_stderr,none": 0.009770869423441486,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43012048192771085,
						"acc_stderr,none": 0.009923711675408058,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3546184738955823,
						"acc_stderr,none": 0.00958907012786187,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750346,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044875,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.009548980649153377,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4397590361445783,
						"acc_stderr,none": 0.009949067285169354,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3429718875502008,
						"acc_stderr,none": 0.009514999934033461,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.578846038144516,
						"acc_stderr,none": 0.053471571298976166,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189277,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7127729980145598,
						"acc_stderr,none": 0.011643935161147864,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6340172071475844,
						"acc_stderr,none": 0.012396308684399377,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5215089344804765,
						"acc_stderr,none": 0.012855214257296603,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5453342157511581,
						"acc_stderr,none": 0.012814127367359412,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6062210456651225,
						"acc_stderr,none": 0.012573415912965178,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4923891462607545,
						"acc_stderr,none": 0.012865634571114483,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6267372600926538,
						"acc_stderr,none": 0.012446911553527132,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5208471211118465,
						"acc_stderr,none": 0.01285593628288127,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5625413633355394,
						"acc_stderr,none": 0.012766070974549619,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5969556585043018,
						"acc_stderr,none": 0.012622895215907705,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7491571139581928,
						"acc_stderr,none": 0.039580427285498064,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8180645161290323,
						"acc_stderr,none": 0.008002661013430045,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6746987951807228,
						"acc_stderr,none": 0.051735765211123864,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6496350364963503,
						"acc_stderr,none": 0.015413891595766078,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6920152091254753,
						"acc_stderr,none": 0.02852146369115504,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.026602896148920786,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7142857142857143,
						"acc_stderr,none": 0.02014271312297313,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv6-world-1b6"
	},
	"SmerkyG/rwkv6-world-3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5594701240135288,
						"acc_norm,none": 0.532130777903044,
						"acc_norm_stderr,none": 0.0773842728905496,
						"acc_stderr,none": 0.10124819228582911,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3640625,
						"acc_stderr,none": 0.014782647995619878,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1888,
						"acc_stderr,none": 0.17781426014146343,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8460149253731343,
						"acc_stderr,none": 0.14076081991596986,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25260029717682025,
						"acc_norm,none": 0.25260029717682025,
						"acc_norm_stderr,none": 0.12065927314620238,
						"acc_stderr,none": 0.12065927314620238,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2509929200483509,
						"acc_norm,none": 0.2509929200483509,
						"acc_norm_stderr,none": 0.04294993109265056,
						"acc_stderr,none": 0.04294993109265056,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3059220333929638,
						"likelihood_diff_stderr,none": 0.5136594530731841,
						"pct_stereotype,none": 0.5875074537865236,
						"pct_stereotype_stderr,none": 0.07151882028756515
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519594
					},
					"glue": {
						"acc,none": 0.58234575988566,
						"acc_stderr,none": 0.012235434109927088,
						"alias": "glue",
						"f1,none": 0.6407219089231401,
						"f1_stderr,none": 0.0009851616975286729,
						"mcc,none": 0.0020830787503762953,
						"mcc_stderr,none": 0.03109888317325633
					},
					"kmmlu": {
						"acc,none": 0.1987005486572336,
						"acc_norm,none": 0.1987005486572336,
						"acc_norm_stderr,none": 0.033468249500047356,
						"acc_stderr,none": 0.033468249500047356,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5277351458013594,
						"acc_norm,none": 0.526,
						"acc_norm_stderr,none": 0.0004996472945891778,
						"acc_stderr,none": 0.05014420963066275,
						"alias": "kobest",
						"f1,none": 0.4593965207472895,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6770813118571707,
						"acc_stderr,none": 0.016474273358248928,
						"alias": "lambada",
						"perplexity,none": 4.468392589321696,
						"perplexity_stderr,none": 0.28886798877698455
					},
					"lambada_cloze": {
						"acc,none": 0.07005627789637105,
						"acc_stderr,none": 0.025757074852232192,
						"alias": "lambada_cloze",
						"perplexity,none": 433.38508675165787,
						"perplexity_stderr,none": 126.49961730170455
					},
					"lambada_multilingual": {
						"acc,none": 0.5048709489617698,
						"acc_stderr,none": 0.0785743922588123,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.052505514598444,
						"perplexity_stderr,none": 10.46454177414525
					},
					"mmlu": {
						"acc,none": 0.25345392394245836,
						"acc_stderr,none": 0.03836809743654213,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2601487778958555,
						"acc_stderr,none": 0.028561424050715687,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.27164467331831355,
						"acc_stderr,none": 0.03908199961155891,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2349691257718557,
						"acc_stderr,none": 0.035604015670430735,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24357754519505234,
						"acc_stderr,none": 0.04711762589408258,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2943931866572037,
						"acc_norm,none": 0.27006373784249116,
						"acc_norm_stderr,none": 8.605444729523063e-05,
						"acc_stderr,none": 0.09291317724421184,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.49692857142857144,
						"acc_stderr,none": 0.04403144516789717,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7311307895223446,
						"acc_norm,none": 0.5389946142143349,
						"acc_norm_stderr,none": 0.00854315545271045,
						"acc_stderr,none": 0.13890739960129175,
						"alias": "pythia",
						"bits_per_byte,none": 0.6717306930962005,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5929828037968814,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.924206178453125,
						"perplexity_stderr,none": 0.0821956408016066,
						"word_perplexity,none": 12.059069823657135,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32269503546099293,
						"acc_norm,none": 0.41134751773049644,
						"acc_norm_stderr,none": 0.056379014482506803,
						"acc_stderr,none": 0.03784542110540027,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5315962863132674,
						"acc_stderr,none": 0.02431298277346284,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3005070051292488,
						"acc_stderr,none": 0.0012789567297438122,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236604,
						"bleu_diff,none": -7.735023660480696,
						"bleu_diff_stderr,none": 0.7702991508502799,
						"bleu_max,none": 24.93657615966411,
						"bleu_max_stderr,none": 0.7711935927537135,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839671,
						"rouge1_diff,none": -9.865338598421463,
						"rouge1_diff_stderr,none": 0.8345101271358512,
						"rouge1_max,none": 49.861965407064524,
						"rouge1_max_stderr,none": 0.8519780444462061,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.014651337324602593,
						"rouge2_diff,none": -12.067396511012621,
						"rouge2_diff_stderr,none": 1.0165379157789043,
						"rouge2_max,none": 33.71419121976144,
						"rouge2_max_stderr,none": 0.9770836179707644,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237258,
						"rougeL_diff,none": -10.524215373912806,
						"rougeL_diff_stderr,none": 0.8448810138863718,
						"rougeL_max,none": 46.776402003616276,
						"rougeL_max_stderr,none": 0.8657546054038466
					},
					"xcopa": {
						"acc,none": 0.5952727272727273,
						"acc_stderr,none": 0.06103875896467541,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4415796519410977,
						"acc_stderr,none": 0.04681755111612141,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6071836832922207,
						"acc_stderr,none": 0.05979388819830518,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7779276241852101,
						"acc_stderr,none": 0.04102937959859529,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5594701240135288,
						"acc_norm,none": 0.532130777903044,
						"acc_norm_stderr,none": 0.0773842728905496,
						"acc_stderr,none": 0.10124819228582911,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3640625,
						"acc_stderr,none": 0.014782647995619878,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405754,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.015231776226264896,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3616666666666667,
						"acc_stderr,none": 0.01387613166312388,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3455631399317406,
						"acc_norm,none": 0.36945392491467577,
						"acc_norm_stderr,none": 0.014104578366491895,
						"acc_stderr,none": 0.013896938461145687,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.664983164983165,
						"acc_norm,none": 0.6123737373737373,
						"acc_norm_stderr,none": 0.00999730791444761,
						"acc_stderr,none": 0.009685160765932357,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1888,
						"acc_stderr,none": 0.17781426014146343,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0285,
						"acc_stderr,none": 0.0037216663472428996,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.5405,
						"acc_stderr,none": 0.011146389370464364,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0935,
						"acc_stderr,none": 0.006511534000335073,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.657,
						"acc_stderr,none": 0.010617526356593672,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.1005,
						"acc_stderr,none": 0.006724766631127032,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.2565,
						"acc_stderr,none": 0.009767373023894072,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.027,
						"acc_stderr,none": 0.003625199447688036,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1465,
						"acc_stderr,none": 0.007908865283657352,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.0019296986470519841,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.031,
						"acc_stderr,none": 0.003876469206217495,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8460149253731343,
						"acc_stderr,none": 0.14076081991596986,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.00891686663074591,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469274,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659992,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515448,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.623,
						"acc_stderr,none": 0.015333170125779855,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890119,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785133,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098728,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611491,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165572,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389621,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406117,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315145,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832013,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611471,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621231,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454264,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117709,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475286,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890141,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611466,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.414,
						"acc_stderr,none": 0.015583544104177526,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787728,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370143,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259588,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946087,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565886,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697593,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.729,
						"acc_stderr,none": 0.014062601350986186,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.015292149942040577,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.01569721001969469,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.014470846741134713,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427416,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074112,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400224,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524298,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673723,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163041,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469417,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024964,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.013790038620872826,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.015757928553979162,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410044,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524407,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437573,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881444,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.428,
						"acc_stderr,none": 0.015654426245029284,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074798,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852623,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756991,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248121,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737227,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406107,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756978,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306501,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.015757928553979162,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797582,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6519877675840978,
						"acc_stderr,none": 0.008331237559535392,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.44642857142857145,
						"acc_stderr,none": 0.06703189227942397,
						"alias": "cb",
						"f1,none": 0.34137931034482755,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25260029717682025,
						"acc_norm,none": 0.25260029717682025,
						"acc_norm_stderr,none": 0.12065927314620238,
						"acc_stderr,none": 0.12065927314620238,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.15151515151515152,
						"acc_norm,none": 0.15151515151515152,
						"acc_norm_stderr,none": 0.06338333534349057,
						"acc_stderr,none": 0.06338333534349057,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.06242676343682883,
						"acc_stderr,none": 0.06242676343682883,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.060606060606060615,
						"acc_stderr,none": 0.060606060606060615,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031766,
						"acc_stderr,none": 0.07633651333031766,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.41935483870967744,
						"acc_norm,none": 0.41935483870967744,
						"acc_norm_stderr,none": 0.09009187125012223,
						"acc_stderr,none": 0.09009187125012223,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.06273323266748673,
						"acc_stderr,none": 0.06273323266748673,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.0917662935482247,
						"acc_stderr,none": 0.0917662935482247,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828228,
						"acc_stderr,none": 0.05589005688828228,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.05650315562208096,
						"acc_stderr,none": 0.05650315562208096,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2509929200483509,
						"acc_norm,none": 0.2509929200483509,
						"acc_norm_stderr,none": 0.04294993109265056,
						"acc_stderr,none": 0.04294993109265056,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.034339196275485345,
						"acc_stderr,none": 0.034339196275485345,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395382,
						"acc_stderr,none": 0.03242041613395382,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.033175059300091805,
						"acc_stderr,none": 0.033175059300091805,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03053259742712212,
						"acc_stderr,none": 0.03053259742712212,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22137404580152673,
						"acc_norm,none": 0.22137404580152673,
						"acc_norm_stderr,none": 0.03641297081313729,
						"acc_stderr,none": 0.03641297081313729,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.19117647058823528,
						"acc_norm,none": 0.19117647058823528,
						"acc_norm_stderr,none": 0.03384365225033988,
						"acc_stderr,none": 0.03384365225033988,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.22291021671826625,
						"acc_norm,none": 0.22291021671826625,
						"acc_norm_stderr,none": 0.023193839672648014,
						"acc_stderr,none": 0.023193839672648014,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.0309645179269234,
						"acc_stderr,none": 0.0309645179269234,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2346368715083799,
						"acc_norm,none": 0.2346368715083799,
						"acc_norm_stderr,none": 0.03176302794175762,
						"acc_stderr,none": 0.03176302794175762,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.029443773022594693,
						"acc_stderr,none": 0.029443773022594693,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800372,
						"acc_stderr,none": 0.04142972007800372,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.043628399335700986,
						"acc_stderr,none": 0.043628399335700986,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800376,
						"acc_stderr,none": 0.04142972007800376,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252626,
						"acc_stderr,none": 0.04077494709252626,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.03922322702763677,
						"acc_stderr,none": 0.03922322702763677,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.038897222883185506,
						"acc_stderr,none": 0.038897222883185506,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2271062271062271,
						"acc_norm,none": 0.2271062271062271,
						"acc_norm_stderr,none": 0.025403290424595132,
						"acc_stderr,none": 0.025403290424595132,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.031493281045079556,
						"acc_stderr,none": 0.031493281045079556,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987249,
						"acc_stderr,none": 0.03301405946987249,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617515,
						"acc_stderr,none": 0.03814280082617515,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.0379400712153362,
						"acc_stderr,none": 0.0379400712153362,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27672955974842767,
						"acc_norm,none": 0.27672955974842767,
						"acc_norm_stderr,none": 0.03559177035707934,
						"acc_stderr,none": 0.03559177035707934,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.03351953879521271,
						"acc_stderr,none": 0.03351953879521271,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255953,
						"acc_stderr,none": 0.03492619473255953,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.027474608338697398,
						"acc_stderr,none": 0.027474608338697398,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.03154449888270285,
						"acc_stderr,none": 0.03154449888270285,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.24789915966386555,
						"acc_norm,none": 0.24789915966386555,
						"acc_norm_stderr,none": 0.028047967224176892,
						"acc_stderr,none": 0.028047967224176892,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.03633384414073465,
						"acc_stderr,none": 0.03633384414073465,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.037068604626235575,
						"acc_stderr,none": 0.037068604626235575,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.22818791946308725,
						"acc_norm,none": 0.22818791946308725,
						"acc_norm_stderr,none": 0.03449619964127219,
						"acc_stderr,none": 0.03449619964127219,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689466,
						"acc_stderr,none": 0.032793177922689466,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.19696969696969696,
						"acc_norm,none": 0.19696969696969696,
						"acc_norm_stderr,none": 0.03474801718164945,
						"acc_stderr,none": 0.03474801718164945,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.211864406779661,
						"acc_norm,none": 0.211864406779661,
						"acc_norm_stderr,none": 0.03777778933227659,
						"acc_stderr,none": 0.03777778933227659,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665338,
						"acc_stderr,none": 0.03334645408665338,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.04013964554072775,
						"acc_stderr,none": 0.04013964554072775,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.21428571428571427,
						"acc_norm,none": 0.21428571428571427,
						"acc_norm_stderr,none": 0.03670066451047181,
						"acc_stderr,none": 0.03670066451047181,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761064,
						"acc_stderr,none": 0.03336605189761064,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.02147099185339829,
						"acc_stderr,none": 0.02147099185339829,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2570093457943925,
						"acc_norm,none": 0.2570093457943925,
						"acc_norm_stderr,none": 0.02994169153324465,
						"acc_stderr,none": 0.02994169153324465,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.1951219512195122,
						"acc_norm,none": 0.1951219512195122,
						"acc_norm_stderr,none": 0.0358788233093556,
						"acc_stderr,none": 0.0358788233093556,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02946134404236891,
						"acc_stderr,none": 0.02946134404236891,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2388888888888889,
						"acc_norm,none": 0.2388888888888889,
						"acc_norm_stderr,none": 0.03187098535605761,
						"acc_stderr,none": 0.03187098535605761,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.30687830687830686,
						"acc_norm,none": 0.30687830687830686,
						"acc_norm_stderr,none": 0.03363635410184866,
						"acc_stderr,none": 0.03363635410184866,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.04376552980994349,
						"acc_stderr,none": 0.04376552980994349,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.042857142857142844,
						"acc_stderr,none": 0.042857142857142844,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.22274881516587677,
						"acc_norm,none": 0.22274881516587677,
						"acc_norm_stderr,none": 0.028713011859407108,
						"acc_stderr,none": 0.028713011859407108,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2872340425531915,
						"acc_norm,none": 0.2872340425531915,
						"acc_norm_stderr,none": 0.02336553857581674,
						"acc_stderr,none": 0.02336553857581674,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.03020039007523149,
						"acc_stderr,none": 0.03020039007523149,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.23563218390804597,
						"acc_norm,none": 0.23563218390804597,
						"acc_norm_stderr,none": 0.032266023739324454,
						"acc_stderr,none": 0.032266023739324454,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.03633384414073465,
						"acc_stderr,none": 0.03633384414073465,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.22566371681415928,
						"acc_norm,none": 0.22566371681415928,
						"acc_norm_stderr,none": 0.027867910955296744,
						"acc_stderr,none": 0.027867910955296744,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697947,
						"acc_stderr,none": 0.03162930395697947,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268948,
						"acc_stderr,none": 0.03279317792268948,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.03471460744058984,
						"acc_stderr,none": 0.03471460744058984,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0020830787503762953,
						"mcc_stderr,none": 0.03109888317325633
					},
					"copa": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3059220333929638,
						"likelihood_diff_stderr,none": 0.5136594530731841,
						"pct_stereotype,none": 0.5875074537865236,
						"pct_stereotype_stderr,none": 0.07151882028756515
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4540846750149075,
						"likelihood_diff_stderr,none": 0.0844355419184694,
						"pct_stereotype,none": 0.6022659511031604,
						"pct_stereotype_stderr,none": 0.011955108834070407
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.857142857142857,
						"likelihood_diff_stderr,none": 0.398105688034323,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.954545454545454,
						"likelihood_diff_stderr,none": 2.0129744448706948,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.319230769230769,
						"likelihood_diff_stderr,none": 0.6400708835330238,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.3609375,
						"likelihood_diff_stderr,none": 0.15522782322683704,
						"pct_stereotype,none": 0.603125,
						"pct_stereotype_stderr,none": 0.02739272232337023
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2621527777777777,
						"likelihood_diff_stderr,none": 0.21299923491218659,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.03376922151252335
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.640625,
						"likelihood_diff_stderr,none": 0.3426562438116692,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.2866633858267718,
						"likelihood_diff_stderr,none": 0.14333528484107635,
						"pct_stereotype,none": 0.4704724409448819,
						"pct_stereotype_stderr,none": 0.022167024359332235
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5146396396396398,
						"likelihood_diff_stderr,none": 0.34883735002427163,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860918
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.065860215053763,
						"likelihood_diff_stderr,none": 0.3724864696569066,
						"pct_stereotype,none": 0.8064516129032258,
						"pct_stereotype_stderr,none": 0.04118983213348786
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.235526315789474,
						"likelihood_diff_stderr,none": 0.2504968022463132,
						"pct_stereotype,none": 0.6526315789473685,
						"pct_stereotype_stderr,none": 0.03463365347393427
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.157871198568873,
						"likelihood_diff_stderr,none": 0.07455484321443681,
						"pct_stereotype,none": 0.571258199165176,
						"pct_stereotype_stderr,none": 0.012088631245959934
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.0680555555555555,
						"likelihood_diff_stderr,none": 0.24749967773146958,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.051929078688949845
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.3461538461538463,
						"likelihood_diff_stderr,none": 0.5185180293351063,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.8522727272727275,
						"likelihood_diff_stderr,none": 0.3987515947139586,
						"pct_stereotype,none": 0.7727272727272727,
						"pct_stereotype_stderr,none": 0.05197926135426052
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.7052180685358254,
						"likelihood_diff_stderr,none": 0.14630323886319604,
						"pct_stereotype,none": 0.557632398753894,
						"pct_stereotype_stderr,none": 0.027764551737212487
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4594861660079053,
						"likelihood_diff_stderr,none": 0.19290423148456318,
						"pct_stereotype,none": 0.42292490118577075,
						"pct_stereotype_stderr,none": 0.031120568731718617
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.654513888888889,
						"likelihood_diff_stderr,none": 0.5193670266485347,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.05533504751887217
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8730978260869566,
						"likelihood_diff_stderr,none": 0.13552445692468776,
						"pct_stereotype,none": 0.49782608695652175,
						"pct_stereotype_stderr,none": 0.023337780813399874
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.258695652173913,
						"likelihood_diff_stderr,none": 0.29763449604812353,
						"pct_stereotype,none": 0.7391304347826086,
						"pct_stereotype_stderr,none": 0.041126317518561634
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.0618131868131866,
						"likelihood_diff_stderr,none": 0.2839667399190287,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.043649726328985325
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5057397959183674,
						"likelihood_diff_stderr,none": 0.24945042512012475,
						"pct_stereotype,none": 0.6530612244897959,
						"pct_stereotype_stderr,none": 0.03408678678944597
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519594
					},
					"glue": {
						"acc,none": 0.58234575988566,
						"acc_stderr,none": 0.012235434109927088,
						"alias": "glue",
						"f1,none": 0.6407219089231401,
						"f1_stderr,none": 0.0009851616975286729,
						"mcc,none": 0.0020830787503762953,
						"mcc_stderr,none": 0.03109888317325633
					},
					"hellaswag": {
						"acc,none": 0.47998406691894047,
						"acc_norm,none": 0.6475801633140809,
						"acc_norm_stderr,none": 0.004767475366689806,
						"acc_stderr,none": 0.004985781620467015,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.1987005486572336,
						"acc_norm,none": 0.1987005486572336,
						"acc_norm_stderr,none": 0.033468249500047356,
						"acc_stderr,none": 0.033468249500047356,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.03775251680686371,
						"acc_stderr,none": 0.03775251680686371,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.157,
						"acc_norm,none": 0.157,
						"acc_norm_stderr,none": 0.011510146979230187,
						"acc_stderr,none": 0.011510146979230187,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662158,
						"acc_stderr,none": 0.012207580637662158,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.013607356839598126,
						"acc_stderr,none": 0.013607356839598126,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515431,
						"acc_stderr,none": 0.013531522534515431,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.22833333333333333,
						"acc_norm,none": 0.22833333333333333,
						"acc_norm_stderr,none": 0.017150868516058564,
						"acc_stderr,none": 0.017150868516058564,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.011598902298689005,
						"acc_stderr,none": 0.011598902298689005,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024387,
						"acc_stderr,none": 0.012749374359024387,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.172,
						"acc_norm,none": 0.172,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.011939788882495321,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.02808592343999728,
						"acc_stderr,none": 0.02808592343999728,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.01129723982340928,
						"acc_stderr,none": 0.01129723982340928,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.04063619567656726,
						"acc_stderr,none": 0.04063619567656726,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937594,
						"acc_stderr,none": 0.013493000446937594,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696235,
						"acc_stderr,none": 0.013127502859696235,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281557,
						"acc_stderr,none": 0.013354937452281557,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.122,
						"acc_norm,none": 0.122,
						"acc_norm_stderr,none": 0.010354864712936708,
						"acc_stderr,none": 0.010354864712936708,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.012155153135511961,
						"acc_stderr,none": 0.012155153135511961,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.157,
						"acc_norm,none": 0.157,
						"acc_norm_stderr,none": 0.011510146979230172,
						"acc_stderr,none": 0.011510146979230172,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.199,
						"acc_norm,none": 0.199,
						"acc_norm_stderr,none": 0.01263164908309918,
						"acc_stderr,none": 0.01263164908309918,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274545,
						"acc_stderr,none": 0.012886662332274545,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644586,
						"acc_stderr,none": 0.011800434324644586,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.013607356839598114,
						"acc_stderr,none": 0.013607356839598114,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.011598902298689004,
						"acc_stderr,none": 0.011598902298689004,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.013084731950262034,
						"acc_stderr,none": 0.013084731950262034,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370098,
						"acc_stderr,none": 0.012486268734370098,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.01329419932661362,
						"acc_stderr,none": 0.01329419932661362,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.20166666666666666,
						"acc_norm,none": 0.20166666666666666,
						"acc_norm_stderr,none": 0.01639440955971654,
						"acc_stderr,none": 0.01639440955971654,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541658,
						"acc_stderr,none": 0.011884495834541658,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024386,
						"acc_stderr,none": 0.012749374359024386,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.201,
						"acc_norm,none": 0.201,
						"acc_norm_stderr,none": 0.01267910721461733,
						"acc_stderr,none": 0.01267910721461733,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370101,
						"acc_stderr,none": 0.012486268734370101,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.18333333333333332,
						"acc_norm,none": 0.18333333333333332,
						"acc_norm_stderr,none": 0.022377292466572545,
						"acc_stderr,none": 0.022377292466572545,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515448,
						"acc_stderr,none": 0.013531522534515448,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.01223358739947783,
						"acc_stderr,none": 0.01223358739947783,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031829,
						"acc_stderr,none": 0.012997843819031829,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496855,
						"acc_stderr,none": 0.027234326551496855,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.181,
						"acc_norm,none": 0.181,
						"acc_norm_stderr,none": 0.012181436179177904,
						"acc_stderr,none": 0.012181436179177904,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920842,
						"acc_stderr,none": 0.013512312258920842,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699357,
						"acc_stderr,none": 0.028873315391699357,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.01286407728849933,
						"acc_stderr,none": 0.01286407728849933,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5277351458013594,
						"acc_norm,none": 0.526,
						"acc_norm_stderr,none": 0.0004996472945891778,
						"acc_stderr,none": 0.05014420963066275,
						"alias": "kobest",
						"f1,none": 0.4593965207472895,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5398860398860399,
						"acc_stderr,none": 0.013306226706072411,
						"alias": " - kobest_boolq",
						"f1,none": 0.47182389937106917,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.01547331326585941,
						"alias": " - kobest_copa",
						"f1,none": 0.6026633595414976,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.388,
						"acc_norm,none": 0.526,
						"acc_norm_stderr,none": 0.02235279165091416,
						"acc_stderr,none": 0.02181430098478763,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.38557243373436717,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5944584382871536,
						"acc_stderr,none": 0.024673504551633432,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5645773299816749,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6770813118571707,
						"acc_stderr,none": 0.016474273358248928,
						"alias": "lambada",
						"perplexity,none": 4.468392589321696,
						"perplexity_stderr,none": 0.28886798877698455
					},
					"lambada_cloze": {
						"acc,none": 0.07005627789637105,
						"acc_stderr,none": 0.025757074852232192,
						"alias": "lambada_cloze",
						"perplexity,none": 433.38508675165787,
						"perplexity_stderr,none": 126.49961730170455
					},
					"lambada_multilingual": {
						"acc,none": 0.5048709489617698,
						"acc_stderr,none": 0.0785743922588123,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.052505514598444,
						"perplexity_stderr,none": 10.46454177414525
					},
					"lambada_openai": {
						"acc,none": 0.7079371240054337,
						"acc_stderr,none": 0.00633501423588443,
						"alias": " - lambada_openai",
						"perplexity,none": 3.924206178453125,
						"perplexity_stderr,none": 0.0821956408016066
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.01901804773918106,
						"acc_stderr,none": 0.0019029419850946558,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 684.0081975284239,
						"perplexity_stderr,none": 23.54875446648411
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3976324471181836,
						"acc_stderr,none": 0.006818420259588963,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 44.30120835710329,
						"perplexity_stderr,none": 2.5553513230291283
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7075490005821852,
						"acc_stderr,none": 0.006337484186544329,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.9264859702186374,
						"perplexity_stderr,none": 0.08237607666624414
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4292645061129439,
						"acc_stderr,none": 0.006895916655437447,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 38.604219309786316,
						"perplexity_stderr,none": 1.965044865230527
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5138754123811372,
						"acc_stderr,none": 0.006963294862063177,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 22.809199521367226,
						"perplexity_stderr,none": 1.174835435366836
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.47603337861439937,
						"acc_stderr,none": 0.0069579705549025995,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 30.621414414516742,
						"perplexity_stderr,none": 1.6856488952367699
					},
					"lambada_standard": {
						"acc,none": 0.6468076848437804,
						"acc_stderr,none": 0.006658942751641766,
						"alias": " - lambada_standard",
						"perplexity,none": 5.010583545621482,
						"perplexity_stderr,none": 0.11453751322292006
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.12109450805356103,
						"acc_stderr,none": 0.004545120330900577,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 182.76197597489175,
						"perplexity_stderr,none": 6.387551854087879
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2340966921119593,
						"exact_match_stderr,get-answer": 0.010683080933862756
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399504,
						"acc_stderr,none": 0.016555252497925894,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23727735368956743,
						"acc_norm,none": 0.28498727735368956,
						"acc_norm_stderr,none": 0.011388893410930618,
						"acc_stderr,none": 0.010733055454349933,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24120603015075376,
						"acc_norm,none": 0.254606365159129,
						"acc_norm_stderr,none": 0.007974951653806822,
						"acc_stderr,none": 0.007831710160500703,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5942596907434865,
						"acc_stderr,none": 0.005053627363463858,
						"alias": "mc_taco",
						"f1,none": 0.39103481163567,
						"f1_stderr,none": 0.00779823280411256
					},
					"medmcqa": {
						"acc,none": 0.2689457327277074,
						"acc_norm,none": 0.2689457327277074,
						"acc_norm_stderr,none": 0.00685669960008816,
						"acc_stderr,none": 0.00685669960008816,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27258444619010214,
						"acc_norm,none": 0.27258444619010214,
						"acc_norm_stderr,none": 0.012485279567743075,
						"acc_stderr,none": 0.012485279567743075,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25345392394245836,
						"acc_stderr,none": 0.03836809743654213,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.038612291966536955,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.03785714465066653,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.20394736842105263,
						"acc_stderr,none": 0.03279000406310049,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.28679245283018867,
						"acc_stderr,none": 0.027834912527544067,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2708333333333333,
						"acc_stderr,none": 0.03716177437566018,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909281,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.1568627450980392,
						"acc_stderr,none": 0.036186648199362466,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610337,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.21379310344827587,
						"acc_stderr,none": 0.034165204477475494,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2566137566137566,
						"acc_stderr,none": 0.022494510767503154,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.037184890068181146,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24838709677419354,
						"acc_stderr,none": 0.02458002892148101,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.031089826002937523,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.28484848484848485,
						"acc_stderr,none": 0.035243908445117836,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23737373737373738,
						"acc_stderr,none": 0.030313710538198896,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.22797927461139897,
						"acc_stderr,none": 0.03027690994517825,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2205128205128205,
						"acc_stderr,none": 0.02102067268082791,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.02696242432507383,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.21008403361344538,
						"acc_stderr,none": 0.026461398717471874,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.033367670865679766,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23669724770642203,
						"acc_stderr,none": 0.018224078117299085,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.02769691071309394,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.030778554678693275,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2489451476793249,
						"acc_stderr,none": 0.028146970599422644,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.336322869955157,
						"acc_stderr,none": 0.031708824268455,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22900763358778625,
						"acc_stderr,none": 0.036853466317118506,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2601487778958555,
						"acc_stderr,none": 0.028561424050715687,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2644628099173554,
						"acc_stderr,none": 0.04026187527591206,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.044531975073749834,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22085889570552147,
						"acc_stderr,none": 0.03259177392742177,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.041577515398656284,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.23300970873786409,
						"acc_stderr,none": 0.041858325989283136,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2606837606837607,
						"acc_stderr,none": 0.028760348956523414,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2835249042145594,
						"acc_stderr,none": 0.016117318166832276,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.023618678310069356,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961445,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.024518195641879334,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.27164467331831355,
						"acc_stderr,none": 0.03908199961155891,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.27009646302250806,
						"acc_stderr,none": 0.02521804037341062,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.30246913580246915,
						"acc_stderr,none": 0.025557653981868055,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.26595744680851063,
						"acc_stderr,none": 0.026358065698880596,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25684485006518903,
						"acc_stderr,none": 0.011158455853098858,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.02576725201085597,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.26633986928104575,
						"acc_stderr,none": 0.01788318813466721,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.04554619617541054,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19183673469387755,
						"acc_stderr,none": 0.02520696315422539,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2349691257718557,
						"acc_stderr,none": 0.035604015670430735,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.21890547263681592,
						"acc_stderr,none": 0.029239174636647,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24357754519505234,
						"acc_stderr,none": 0.04711762589408258,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2710843373493976,
						"acc_stderr,none": 0.03460579907553027,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.0356507967070831,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.39643402954661233,
						"acc_stderr,none": 0.004937701246259722,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3966639544344996,
						"acc_stderr,none": 0.004933920605836456,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.024549517375517627,
						"alias": "mrpc",
						"f1,none": 0.3136094674556213,
						"f1_stderr,none": 0.03279317395902456
					},
					"multimedqa": {
						"acc,none": 0.2943931866572037,
						"acc_norm,none": 0.27006373784249116,
						"acc_norm_stderr,none": 8.605444729523063e-05,
						"acc_stderr,none": 0.09291317724421184,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.40532178217821785,
						"acc_stderr,none": 0.0070518729223107085,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.691027089717964,
						"mrr_stderr,none": 0.01035936097867152,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4187358916478555,
						"r@2_stderr,none": 0.016583844316361184
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6424943584755366,
						"mrr_stderr,none": 0.010416128336552824,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403396
					},
					"openbookqa": {
						"acc,none": 0.268,
						"acc_norm,none": 0.394,
						"acc_norm_stderr,none": 0.021874299301689253,
						"acc_stderr,none": 0.019827714859587564,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.432,
						"acc_stderr,none": 0.011079231683079106,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4365,
						"acc_stderr,none": 0.011092583003919652,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.447,
						"acc_stderr,none": 0.01112013168376774,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.011141704034140802,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5565,
						"acc_stderr,none": 0.011111507899646485,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.513,
						"acc_stderr,none": 0.011179355482070377,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.49692857142857144,
						"acc_stderr,none": 0.04403144516789717,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7421109902067464,
						"acc_norm,none": 0.750816104461371,
						"acc_norm_stderr,none": 0.010091882770120216,
						"acc_stderr,none": 0.010206956662056269,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2647843723313407,
						"acc_norm,none": 0.2585930828351836,
						"acc_norm_stderr,none": 0.003198968177864627,
						"acc_stderr,none": 0.003223492698533857,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7311307895223446,
						"acc_norm,none": 0.5389946142143349,
						"acc_norm_stderr,none": 0.00854315545271045,
						"acc_stderr,none": 0.13890739960129175,
						"alias": "pythia",
						"bits_per_byte,none": 0.6717306930962005,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5929828037968814,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.924206178453125,
						"perplexity_stderr,none": 0.0821956408016066,
						"word_perplexity,none": 12.059069823657135,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32269503546099293,
						"acc_norm,none": 0.41134751773049644,
						"acc_norm_stderr,none": 0.056379014482506803,
						"acc_stderr,none": 0.03784542110540027,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.35833333333333334,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.04580945392704764,
						"acc_stderr,none": 0.04395667801920048,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.33098591549295775,
						"acc_norm,none": 0.36971830985915494,
						"acc_norm_stderr,none": 0.02869522320315008,
						"acc_stderr,none": 0.027972363900546835,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5108914515833791,
						"acc_stderr,none": 0.006763805285029654,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6782092505565175,
						"acc_stderr,none": 0.0023233900894452197,
						"alias": "qqp",
						"f1,none": 0.643600701293009,
						"f1_stderr,none": 0.002920369654115905
					},
					"race": {
						"acc,none": 0.3521531100478469,
						"acc_stderr,none": 0.014782629897202254,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5595667870036101,
						"acc_stderr,none": 0.029882123363118723,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.925,
						"acc_norm,none": 0.9,
						"acc_norm_stderr,none": 0.009491579957525049,
						"acc_stderr,none": 0.008333333333333363,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5595667870036101,
						"acc_stderr,none": 0.029882123363118723,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8807339449541285,
						"acc_stderr,none": 0.010981754158983057,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5500849745076477,
						"acc_norm,none": 0.7445266420073978,
						"acc_norm_stderr,none": 0.0030834978640596348,
						"acc_stderr,none": 0.003517311742788213,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5315962863132674,
						"acc_stderr,none": 0.02431298277346284,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5005008012820513,
						"acc_stderr,none": 0.005004252916283736,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.592986723421506,
						"acc_stderr,none": 0.004946023976825976,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5026470588235294,
						"acc_stderr,none": 0.004950911033212593,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3005070051292488,
						"acc_stderr,none": 0.0012789567297438122,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236604,
						"bleu_diff,none": -7.735023660480696,
						"bleu_diff_stderr,none": 0.7702991508502799,
						"bleu_max,none": 24.93657615966411,
						"bleu_max_stderr,none": 0.7711935927537135,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839671,
						"rouge1_diff,none": -9.865338598421463,
						"rouge1_diff_stderr,none": 0.8345101271358512,
						"rouge1_max,none": 49.861965407064524,
						"rouge1_max_stderr,none": 0.8519780444462061,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.014651337324602593,
						"rouge2_diff,none": -12.067396511012621,
						"rouge2_diff_stderr,none": 1.0165379157789043,
						"rouge2_max,none": 33.71419121976144,
						"rouge2_max_stderr,none": 0.9770836179707644,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237258,
						"rougeL_diff,none": -10.524215373912806,
						"rougeL_diff_stderr,none": 0.8448810138863718,
						"rougeL_max,none": 46.776402003616276,
						"rougeL_max_stderr,none": 0.8657546054038466
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2937576499388005,
						"bleu_acc_stderr,none": 0.015945068581236604,
						"bleu_diff,none": -7.735023660480696,
						"bleu_diff_stderr,none": 0.7702991508502799,
						"bleu_max,none": 24.93657615966411,
						"bleu_max_stderr,none": 0.7711935927537135,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.01578537085839671,
						"rouge1_diff,none": -9.865338598421463,
						"rouge1_diff_stderr,none": 0.8345101271358512,
						"rouge1_max,none": 49.861965407064524,
						"rouge1_max_stderr,none": 0.8519780444462061,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.014651337324602593,
						"rouge2_diff,none": -12.067396511012621,
						"rouge2_diff_stderr,none": 1.0165379157789043,
						"rouge2_max,none": 33.71419121976144,
						"rouge2_max_stderr,none": 0.9770836179707644,
						"rougeL_acc,none": 0.2668298653610771,
						"rougeL_acc_stderr,none": 0.015483691939237258,
						"rougeL_diff,none": -10.524215373912806,
						"rougeL_diff_stderr,none": 0.8448810138863718,
						"rougeL_max,none": 46.776402003616276,
						"rougeL_max_stderr,none": 0.8657546054038466
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2350061199510404,
						"acc_stderr,none": 0.014843061507731613,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3660078903074573,
						"acc_stderr,none": 0.01383302039412064,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0004921259842519685,
						"exact_match_stderr,none": 0.0004921259842519565
					},
					"wic": {
						"acc,none": 0.5344827586206896,
						"acc_stderr,none": 0.01976355284279699,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6717485796810253,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5930025537772075,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.05986934098055,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6361483820047356,
						"acc_stderr,none": 0.013521488896883415,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8168498168498168,
						"acc_stderr,none": 0.023452564261705,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5952727272727273,
						"acc_stderr,none": 0.06103875896467541,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.022288147591176952,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.02055326917420919,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.020882340488761805,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.021728881438701705,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.02126575803797874,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.021113492347743745,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4415796519410977,
						"acc_stderr,none": 0.04681755111612141,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361543,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4686746987951807,
						"acc_stderr,none": 0.010002384719762116,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4903614457831325,
						"acc_stderr,none": 0.010020210558438302,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.43614457831325304,
						"acc_stderr,none": 0.009940006562498589,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5365461847389559,
						"acc_stderr,none": 0.009995265580368933,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4799196787148594,
						"acc_stderr,none": 0.01001398741923408,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4987951807228916,
						"acc_stderr,none": 0.010022043771315561,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42730923694779116,
						"acc_stderr,none": 0.009915595034908124,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4831325301204819,
						"acc_stderr,none": 0.010016368453021547,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.37028112449799194,
						"acc_stderr,none": 0.009678915409840292,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.4248995983935743,
						"acc_stderr,none": 0.009908377568198195,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4502008032128514,
						"acc_stderr,none": 0.00997224029676889,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3963855421686747,
						"acc_stderr,none": 0.009804518520476653,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.44016064257028115,
						"acc_stderr,none": 0.009950040960088072,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.37991967871485943,
						"acc_stderr,none": 0.00972875845298786,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6071836832922207,
						"acc_stderr,none": 0.05979388819830518,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5684976836532097,
						"acc_stderr,none": 0.012745810046098411,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7557908669755129,
						"acc_stderr,none": 0.011055879511349603,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6790205162144275,
						"acc_stderr,none": 0.012014110213469813,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.01280271359821984,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5790866975512905,
						"acc_stderr,none": 0.012705145598630686,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.642620780939775,
						"acc_stderr,none": 0.01233256908197468,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5155526141628061,
						"acc_stderr,none": 0.012860899111470788,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6499007279947054,
						"acc_stderr,none": 0.012275258369751088,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5274652547981469,
						"acc_stderr,none": 0.012847698270388211,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5744540039708802,
						"acc_stderr,none": 0.012723670419166324,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6366644606221046,
						"acc_stderr,none": 0.012377153306613268,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7779276241852101,
						"acc_stderr,none": 0.04102937959859529,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8425806451612903,
						"acc_stderr,none": 0.00755469162572208,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7590361445783133,
						"acc_stderr,none": 0.04722807605987255,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6777893639207507,
						"acc_stderr,none": 0.015098526178840365,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.025771203207084713,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.653968253968254,
						"acc_stderr,none": 0.026845499021972877,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.751984126984127,
						"acc_stderr,none": 0.019255734203034475,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "SmerkyG/rwkv6-world-3b"
	},
	"TimeMobius/Mobius-RWKV-Chat-12B-128k-v4-HF": {
		"config": {
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.28122774533542233,
						"acc_stderr,none": 0.0450872087727283,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.27460148777895854,
						"acc_stderr,none": 0.04029110005482576,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.30737045381396844,
						"acc_stderr,none": 0.03812757662915217,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2882677933051674,
						"acc_stderr,none": 0.04401154310344449,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.25848398350777035,
						"acc_stderr,none": 0.05098164266958067,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.4206484641638225,
						"acc_norm,none": 0.4641638225255973,
						"acc_norm_stderr,none": 0.01457381366473572,
						"acc_stderr,none": 0.014426211252508394,
						"alias": "arc_challenge"
					},
					"mmlu": {
						"acc,none": 0.28122774533542233,
						"acc_stderr,none": 0.0450872087727283,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03591444084196969,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2565789473684211,
						"acc_stderr,none": 0.0355418036802569,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.29056603773584905,
						"acc_stderr,none": 0.02794321998933714,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2986111111111111,
						"acc_stderr,none": 0.03827052357950756,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.04605661864718381,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.034140140070440354,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.1568627450980392,
						"acc_stderr,none": 0.03618664819936246,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2936170212765957,
						"acc_stderr,none": 0.02977164271249123,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022056,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643895,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.04073524322147126,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.29354838709677417,
						"acc_stderr,none": 0.025906087021319295,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.21674876847290642,
						"acc_stderr,none": 0.028990331252516235,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3515151515151515,
						"acc_stderr,none": 0.0372820699868265,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.20202020202020202,
						"acc_stderr,none": 0.028606204289229862,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.32642487046632124,
						"acc_stderr,none": 0.033840286211432945,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.02340092891831049,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.027309140588230182,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2815126050420168,
						"acc_stderr,none": 0.029213549414372184,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.25137614678899084,
						"acc_stderr,none": 0.018599206360287415,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1574074074074074,
						"acc_stderr,none": 0.024837173518242384,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.03166009679399813,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.33755274261603374,
						"acc_stderr,none": 0.030781549102026216,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.33587786259541985,
						"acc_stderr,none": 0.041423137719966634,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.27460148777895854,
						"acc_stderr,none": 0.04029110005482576,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04330043749650742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3067484662576687,
						"acc_stderr,none": 0.036230899157241474,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.04327040932578728,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.32038834951456313,
						"acc_stderr,none": 0.046202840822800406,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.32905982905982906,
						"acc_stderr,none": 0.030782321577688152,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3243933588761175,
						"acc_stderr,none": 0.016740929047162706,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2861271676300578,
						"acc_stderr,none": 0.024332146779134128,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24916201117318434,
						"acc_stderr,none": 0.014465893829859933,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.31699346405228757,
						"acc_stderr,none": 0.02664327847450875,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.30737045381396844,
						"acc_stderr,none": 0.03812757662915217,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2347266881028939,
						"acc_stderr,none": 0.024071805887677045,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3271604938271605,
						"acc_stderr,none": 0.026105673861409814,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25097783572359844,
						"acc_stderr,none": 0.01107373029918723,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27941176470588236,
						"acc_stderr,none": 0.02725720260611495,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.01824902441120766,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.37272727272727274,
						"acc_stderr,none": 0.04631381319425464,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.30612244897959184,
						"acc_stderr,none": 0.029504896454595964,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2882677933051674,
						"acc_stderr,none": 0.04401154310344449,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.27860696517412936,
						"acc_stderr,none": 0.031700561834973086,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.25848398350777035,
						"acc_stderr,none": 0.05098164266958067,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.27710843373493976,
						"acc_stderr,none": 0.03484331592680589,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.03660298834049164,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40849124198630155,
						"acc_stderr,none": 0.014306268058424628,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.6716653512233622,
						"acc_stderr,none": 0.013198299449717888,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "TimeMobius/Mobius-RWKV-Chat-12B-128k-v4-HF"
	},
	"TinyLlama/TinyLlama-1.1B-Chat-v1.0": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5098647125140925,
						"acc_norm,none": 0.47322435174746336,
						"acc_norm_stderr,none": 0.035307564164197114,
						"acc_stderr,none": 0.04989607138217046,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.331875,
						"acc_stderr,none": 0.0159936274485604,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1809,
						"acc_stderr,none": 0.13641611458142414,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8202835820895522,
						"acc_stderr,none": 0.15066446956470908,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789004,
						"acc_norm,none": 0.2555720653789004,
						"acc_norm_stderr,none": 0.11507020716088578,
						"acc_stderr,none": 0.11507020716088578,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2485753755828009,
						"acc_norm,none": 0.2485753755828009,
						"acc_norm_stderr,none": 0.03699421492360297,
						"acc_stderr,none": 0.03699421492360297,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.8716556484242886,
						"likelihood_diff_stderr,none": 0.4094923291682157,
						"pct_stereotype,none": 0.5778175313059034,
						"pct_stereotype_stderr,none": 0.07445825319236767
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.02214566929133858,
						"exact_match_stderr,none": 0.0032653258401622418
					},
					"glue": {
						"acc,none": 0.4911121295424004,
						"acc_stderr,none": 0.05579164053964464,
						"alias": "glue",
						"f1,none": 0.37231918569958183,
						"f1_stderr,none": 0.0017896974330622207,
						"mcc,none": -0.017261904112053045,
						"mcc_stderr,none": 0.00090590182508307
					},
					"kmmlu": {
						"acc,none": 0.25873520069304073,
						"acc_norm,none": 0.25873520069304073,
						"acc_norm_stderr,none": 0.027112034351936317,
						"acc_stderr,none": 0.027112034351936317,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48081561061170797,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.0403106549260379,
						"alias": "kobest",
						"f1,none": 0.3828631000929086,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5606442848825927,
						"acc_stderr,none": 0.025309228284552565,
						"alias": "lambada",
						"perplexity,none": 7.97760922995255,
						"perplexity_stderr,none": 1.0328407259597316
					},
					"lambada_cloze": {
						"acc,none": 0.024451775664661363,
						"acc_stderr,none": 0.004101598453044792,
						"alias": "lambada_cloze",
						"perplexity,none": 706.2427173090024,
						"perplexity_stderr,none": 67.11171869741892
					},
					"lambada_multilingual": {
						"acc,none": 0.38001164370269747,
						"acc_stderr,none": 0.06678689076293869,
						"alias": "lambada_multilingual",
						"perplexity,none": 97.5692210616895,
						"perplexity_stderr,none": 28.905811055132286
					},
					"mmlu": {
						"acc,none": 0.24747187010397378,
						"acc_stderr,none": 0.03738317829624497,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2499468650371945,
						"acc_stderr,none": 0.02570536381708068,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2568393949147087,
						"acc_stderr,none": 0.04659847936311328,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23626909327266818,
						"acc_stderr,none": 0.0327684387917673,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24548049476688868,
						"acc_stderr,none": 0.044201259805433206,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.27622427253371185,
						"acc_norm,none": 0.25152089258184174,
						"acc_norm_stderr,none": 9.418663501048668e-05,
						"acc_stderr,none": 0.08723589266520539,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48264285714285715,
						"acc_stderr,none": 0.043170460826796556,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7041466981903363,
						"acc_norm,none": 0.4808648942082854,
						"acc_norm_stderr,none": 0.003947831490021625,
						"acc_stderr,none": 0.14699322403394063,
						"alias": "pythia",
						"bits_per_byte,none": 0.7285090329011088,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6569258405092875,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.963786625754829,
						"perplexity_stderr,none": 0.15922492829117155,
						"word_perplexity,none": 14.883749336326465,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3670212765957447,
						"acc_norm,none": 0.41312056737588654,
						"acc_norm_stderr,none": 0.04665636856731557,
						"acc_stderr,none": 0.03554258710476385,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5029117167481948,
						"acc_stderr,none": 0.011040713804904616,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.30469795992087373,
						"acc_stderr,none": 0.001552805516278786,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -5.725790667225637,
						"bleu_diff_stderr,none": 0.6965950873789364,
						"bleu_max,none": 21.34215767891346,
						"bleu_max_stderr,none": 0.7251541501163864,
						"rouge1_acc,none": 0.2802937576499388,
						"rouge1_acc_stderr,none": 0.015723139524608753,
						"rouge1_diff,none": -8.51446913870341,
						"rouge1_diff_stderr,none": 0.7821992957195486,
						"rouge1_max,none": 45.09414157085349,
						"rouge1_max_stderr,none": 0.8775283766073789,
						"rouge2_acc,none": 0.22766217870257038,
						"rouge2_acc_stderr,none": 0.014679255032111066,
						"rouge2_diff,none": -9.266174219776689,
						"rouge2_diff_stderr,none": 0.9113877449872622,
						"rouge2_max,none": 28.42656401759116,
						"rouge2_max_stderr,none": 0.9559408372967046,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394805,
						"rougeL_diff,none": -8.446714023463292,
						"rougeL_diff_stderr,none": 0.7898948550543593,
						"rougeL_max,none": 42.18826384118312,
						"rougeL_max_stderr,none": 0.8744877426092712
					},
					"xcopa": {
						"acc,none": 0.5289090909090909,
						"acc_stderr,none": 0.031702490482383716,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.38827309236947793,
						"acc_stderr,none": 0.053150754335593575,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5399795439504242,
						"acc_stderr,none": 0.06000317291976968,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7125196673409755,
						"acc_stderr,none": 0.07442833285970002,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5098647125140925,
						"acc_norm,none": 0.47322435174746336,
						"acc_norm_stderr,none": 0.035307564164197114,
						"acc_stderr,none": 0.04989607138217046,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.331875,
						"acc_stderr,none": 0.0159936274485604,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932575,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.314,
						"acc_stderr,none": 0.014683991951087967,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295755,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3003412969283277,
						"acc_norm,none": 0.3293515358361775,
						"acc_norm_stderr,none": 0.013734057652635473,
						"acc_stderr,none": 0.013395909309957005,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6132154882154882,
						"acc_norm,none": 0.5441919191919192,
						"acc_norm_stderr,none": 0.01021963176343785,
						"acc_stderr,none": 0.009993308355370972,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1809,
						"acc_stderr,none": 0.13641611458142414,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.021,
						"acc_stderr,none": 0.00320696777675746,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.011145474902641254,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0975,
						"acc_stderr,none": 0.0066346728963996154,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.4205,
						"acc_stderr,none": 0.011040870681821415,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.010617526356593665,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.2425,
						"acc_stderr,none": 0.009586074348277476,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0865,
						"acc_stderr,none": 0.00628718055408464,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.099,
						"acc_stderr,none": 0.006679955905951289,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0145,
						"acc_stderr,none": 0.002673658397142789,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0255,
						"acc_stderr,none": 0.00352577516941629,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0021691973969631237,
						"acc_stderr,none": 0.0009692521054558507,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8202835820895522,
						"acc_stderr,none": 0.15066446956470908,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936696,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448834,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844881,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262026,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890129,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937594,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412794,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837954,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142658,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910605,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639233,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406117,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246439,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333313,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942284,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734954,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787733,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.01412708655649053,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613614,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045087,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814902,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525042,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024384,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.645,
						"acc_stderr,none": 0.015139491543780532,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973421,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099212,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406727,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139722,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.015780495050030156,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138757002,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015097,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.01581119837311488,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.0155372264386346,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.0077436402269193075,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.00957536880165389,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528035,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.748,
						"acc_stderr,none": 0.013736254390651154,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487905,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.01265543994336665,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490528,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.01550610974549832,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474914,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426474,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177425,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262416,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946094,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746835,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.674,
						"acc_stderr,none": 0.01483050720454104,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235246,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.0110434576993782,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890115,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665546,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523719,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936711,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.0077997330618320105,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.391,
						"acc_stderr,none": 0.015438826294681783,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.318,
						"acc_stderr,none": 0.014734079309311905,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6107033639143731,
						"acc_stderr,none": 0.008528016290984541,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.058387420812114225,
						"alias": "cb",
						"f1,none": 0.2407230196703881,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789004,
						"acc_norm,none": 0.2555720653789004,
						"acc_norm_stderr,none": 0.11507020716088578,
						"acc_stderr,none": 0.11507020716088578,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.06818181818181816,
						"acc_stderr,none": 0.06818181818181816,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.06242676343682882,
						"acc_stderr,none": 0.06242676343682882,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659811,
						"acc_stderr,none": 0.09829463743659811,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031766,
						"acc_stderr,none": 0.07633651333031766,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031766,
						"acc_stderr,none": 0.07633651333031766,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.07138609234576077,
						"acc_stderr,none": 0.07138609234576077,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3469387755102041,
						"acc_norm,none": 0.3469387755102041,
						"acc_norm_stderr,none": 0.06870411522695292,
						"acc_stderr,none": 0.06870411522695292,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.07228658768525043,
						"acc_stderr,none": 0.07228658768525043,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.061487546190134544,
						"acc_stderr,none": 0.061487546190134544,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2485753755828009,
						"acc_norm,none": 0.2485753755828009,
						"acc_norm_stderr,none": 0.03699421492360297,
						"acc_stderr,none": 0.03699421492360297,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139405,
						"acc_stderr,none": 0.03374402644139405,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24401913875598086,
						"acc_norm,none": 0.24401913875598086,
						"acc_norm_stderr,none": 0.029780753228706103,
						"acc_stderr,none": 0.029780753228706103,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.24427480916030533,
						"acc_norm,none": 0.24427480916030533,
						"acc_norm_stderr,none": 0.037683359597287434,
						"acc_stderr,none": 0.037683359597287434,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2426470588235294,
						"acc_norm,none": 0.2426470588235294,
						"acc_norm_stderr,none": 0.036895193269968055,
						"acc_stderr,none": 0.036895193269968055,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.0416574299896527,
						"acc_stderr,none": 0.0416574299896527,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.02425409025245805,
						"acc_stderr,none": 0.02425409025245805,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604243,
						"acc_stderr,none": 0.030587591351604243,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2569832402234637,
						"acc_norm,none": 0.2569832402234637,
						"acc_norm_stderr,none": 0.03275229252356167,
						"acc_stderr,none": 0.03275229252356167,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149678,
						"acc_stderr,none": 0.027820781981149678,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.20754716981132076,
						"acc_norm,none": 0.20754716981132076,
						"acc_norm_stderr,none": 0.039577692383779325,
						"acc_stderr,none": 0.039577692383779325,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316699,
						"acc_stderr,none": 0.04485760883316699,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.28703703703703703,
						"acc_norm,none": 0.28703703703703703,
						"acc_norm_stderr,none": 0.043733130409147614,
						"acc_stderr,none": 0.043733130409147614,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845102,
						"acc_stderr,none": 0.04117581097845102,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936713,
						"acc_stderr,none": 0.04022559246936713,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2673992673992674,
						"acc_norm,none": 0.2673992673992674,
						"acc_norm_stderr,none": 0.026836713439088868,
						"acc_stderr,none": 0.026836713439088868,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.03361670240809546,
						"acc_stderr,none": 0.03361670240809546,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.22699386503067484,
						"acc_norm,none": 0.22699386503067484,
						"acc_norm_stderr,none": 0.03291099578615769,
						"acc_stderr,none": 0.03291099578615769,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.24206349206349206,
						"acc_norm,none": 0.24206349206349206,
						"acc_norm_stderr,none": 0.027036109679236968,
						"acc_stderr,none": 0.027036109679236968,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.1919191919191919,
						"acc_norm,none": 0.1919191919191919,
						"acc_norm_stderr,none": 0.02805779167298902,
						"acc_stderr,none": 0.02805779167298902,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.226890756302521,
						"acc_norm,none": 0.226890756302521,
						"acc_norm_stderr,none": 0.027205371538279472,
						"acc_stderr,none": 0.027205371538279472,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24782608695652175,
						"acc_norm,none": 0.24782608695652175,
						"acc_norm_stderr,none": 0.028530862595410066,
						"acc_stderr,none": 0.028530862595410066,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.26174496644295303,
						"acc_norm,none": 0.26174496644295303,
						"acc_norm_stderr,none": 0.03613362391075455,
						"acc_stderr,none": 0.03613362391075455,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331528,
						"acc_stderr,none": 0.03360300796331528,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552484,
						"acc_stderr,none": 0.03703667194552484,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.04110070549339208,
						"acc_stderr,none": 0.04110070549339208,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.3048780487804878,
						"acc_norm,none": 0.3048780487804878,
						"acc_norm_stderr,none": 0.03605784583600454,
						"acc_stderr,none": 0.03605784583600454,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.03764950879790607,
						"acc_stderr,none": 0.03764950879790607,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.033366051897610625,
						"acc_stderr,none": 0.033366051897610625,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.021470991853398288,
						"acc_stderr,none": 0.021470991853398288,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.02899368065323258,
						"acc_stderr,none": 0.02899368065323258,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03747420876084759,
						"acc_stderr,none": 0.03747420876084759,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.23770491803278687,
						"acc_norm,none": 0.23770491803278687,
						"acc_norm_stderr,none": 0.03869794984381156,
						"acc_stderr,none": 0.03869794984381156,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.02966137041396583,
						"acc_stderr,none": 0.02966137041396583,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2388888888888889,
						"acc_norm,none": 0.2388888888888889,
						"acc_norm_stderr,none": 0.03187098535605761,
						"acc_stderr,none": 0.03187098535605761,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03196107138009968,
						"acc_stderr,none": 0.03196107138009968,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135302,
						"acc_stderr,none": 0.03565998174135302,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.03183348654463749,
						"acc_stderr,none": 0.03183348654463749,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776725,
						"acc_stderr,none": 0.03011304016776725,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281308,
						"acc_stderr,none": 0.022199827758281308,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.02780436020996173,
						"acc_stderr,none": 0.02780436020996173,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26548672566371684,
						"acc_norm,none": 0.26548672566371684,
						"acc_norm_stderr,none": 0.02943946890825876,
						"acc_stderr,none": 0.02943946890825876,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002353,
						"acc_stderr,none": 0.03273943999002353,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.033603007963315265,
						"acc_stderr,none": 0.033603007963315265,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2360248447204969,
						"acc_norm,none": 0.2360248447204969,
						"acc_norm_stderr,none": 0.03357055232967968,
						"acc_stderr,none": 0.03357055232967968,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.034621578458651416,
						"acc_stderr,none": 0.034621578458651416,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.017261904112053045,
						"mcc_stderr,none": 0.0300982030208295
					},
					"copa": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.8716556484242886,
						"likelihood_diff_stderr,none": 0.4094923291682157,
						"pct_stereotype,none": 0.5778175313059034,
						"pct_stereotype_stderr,none": 0.07445825319236767
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.79523398070088,
						"likelihood_diff_stderr,none": 0.08892457286942979,
						"pct_stereotype,none": 0.6249254621347644,
						"pct_stereotype_stderr,none": 0.011825946073917683
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.170768171876341,
						"likelihood_diff_stderr,none": 0.40157629770375797,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.765890294855291,
						"likelihood_diff_stderr,none": 1.879578323879994,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.729961864764874,
						"likelihood_diff_stderr,none": 0.6550623759168775,
						"pct_stereotype,none": 0.6461538461538462,
						"pct_stereotype_stderr,none": 0.05977027026123098
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.0871726632118226,
						"likelihood_diff_stderr,none": 0.1974547915054822,
						"pct_stereotype,none": 0.615625,
						"pct_stereotype_stderr,none": 0.0272358133313715
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.4850791736885354,
						"likelihood_diff_stderr,none": 0.21255881604587118,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.03344887382997866
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7458700603908963,
						"likelihood_diff_stderr,none": 0.3352290174894974,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.629897924858754,
						"likelihood_diff_stderr,none": 0.15031467863123033,
						"pct_stereotype,none": 0.5118110236220472,
						"pct_stereotype_stderr,none": 0.022199583294816923
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.0591246287028,
						"likelihood_diff_stderr,none": 0.3532927703277935,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.04234321361084539
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.058793693460444,
						"likelihood_diff_stderr,none": 0.47850266774541145,
						"pct_stereotype,none": 0.8387096774193549,
						"pct_stereotype_stderr,none": 0.03834564688497145
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.050987464503238,
						"likelihood_diff_stderr,none": 0.24173156052865147,
						"pct_stereotype,none": 0.7052631578947368,
						"pct_stereotype_stderr,none": 0.03316361842984286
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.9500068045828267,
						"likelihood_diff_stderr,none": 0.09360763474113683,
						"pct_stereotype,none": 0.531902206320811,
						"pct_stereotype_stderr,none": 0.012188413676219005
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.71391118367513,
						"likelihood_diff_stderr,none": 0.3513549735855366,
						"pct_stereotype,none": 0.4222222222222222,
						"pct_stereotype_stderr,none": 0.05235473399540658
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.0775730426494894,
						"likelihood_diff_stderr,none": 0.8966470387698565,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.818497050892223,
						"likelihood_diff_stderr,none": 0.4257281786258145,
						"pct_stereotype,none": 0.5909090909090909,
						"pct_stereotype_stderr,none": 0.06098367211363066
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.7337361154526567,
						"likelihood_diff_stderr,none": 0.18844713355701836,
						"pct_stereotype,none": 0.48286604361370716,
						"pct_stereotype_stderr,none": 0.027934433698537306
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.362727470548728,
						"likelihood_diff_stderr,none": 0.23878638755029585,
						"pct_stereotype,none": 0.383399209486166,
						"pct_stereotype_stderr,none": 0.030628616122857784
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.151279343499078,
						"likelihood_diff_stderr,none": 0.46146955570455217,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.057855371034784615
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.7524351368779723,
						"likelihood_diff_stderr,none": 0.2015476937412259,
						"pct_stereotype,none": 0.5608695652173913,
						"pct_stereotype_stderr,none": 0.02316441640598207
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3747086566427478,
						"likelihood_diff_stderr,none": 0.3378965181956233,
						"pct_stereotype,none": 0.5391304347826087,
						"pct_stereotype_stderr,none": 0.04668566114758418
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.889362167526078,
						"likelihood_diff_stderr,none": 0.4043611921428264,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.04705213398778437
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.9907516946597976,
						"likelihood_diff_stderr,none": 0.2643940341620578,
						"pct_stereotype,none": 0.6224489795918368,
						"pct_stereotype_stderr,none": 0.03471541794449721
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.02214566929133858,
						"exact_match_stderr,none": 0.0032653258401622418
					},
					"glue": {
						"acc,none": 0.4911121295424004,
						"acc_stderr,none": 0.05579164053964464,
						"alias": "glue",
						"f1,none": 0.37231918569958183,
						"f1_stderr,none": 0.0017896974330622207,
						"mcc,none": -0.017261904112053045,
						"mcc_stderr,none": 0.00090590182508307
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.01592115238817286,
						"exact_match_stderr,get-answer": 0.003447819272389013
					},
					"hellaswag": {
						"acc,none": 0.4654451304521012,
						"acc_norm,none": 0.6046604262099183,
						"acc_norm_stderr,none": 0.004879242848473459,
						"acc_stderr,none": 0.004977851161904399,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.25873520069304073,
						"acc_norm,none": 0.25873520069304073,
						"acc_norm_stderr,none": 0.027112034351936317,
						"acc_stderr,none": 0.027112034351936317,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361428,
						"acc_stderr,none": 0.014498627873361428,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796257,
						"acc_stderr,none": 0.013996674851796257,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.01392928659425972,
						"acc_stderr,none": 0.01392928659425972,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717574,
						"acc_stderr,none": 0.014095022868717574,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.01719475014029891,
						"acc_stderr,none": 0.01719475014029891,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986187,
						"acc_stderr,none": 0.014062601350986187,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895024,
						"acc_stderr,none": 0.013825416526895024,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434939,
						"acc_stderr,none": 0.014221154708434939,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.030488073292114223,
						"acc_stderr,none": 0.030488073292114223,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.26153846153846155,
						"acc_norm,none": 0.26153846153846155,
						"acc_norm_stderr,none": 0.03869339773766237,
						"acc_stderr,none": 0.03869339773766237,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542126,
						"acc_stderr,none": 0.04512608598542126,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499347,
						"acc_stderr,none": 0.012864077288499347,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024396,
						"acc_stderr,none": 0.012749374359024396,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031316,
						"acc_stderr,none": 0.014267009061031316,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462615,
						"acc_stderr,none": 0.014078856992462615,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.01371813351688892,
						"acc_stderr,none": 0.01371813351688892,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809942,
						"acc_stderr,none": 0.013963164754809942,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740666,
						"acc_stderr,none": 0.014142984975740666,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.013963164754809958,
						"acc_stderr,none": 0.013963164754809958,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.01386041525752791,
						"acc_stderr,none": 0.01386041525752791,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434916,
						"acc_stderr,none": 0.014221154708434916,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881428,
						"acc_stderr,none": 0.013588548437881428,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651138,
						"acc_stderr,none": 0.013736254390651138,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.018032386001530083,
						"acc_stderr,none": 0.018032386001530083,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220056,
						"acc_stderr,none": 0.013374972519220056,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220477,
						"acc_stderr,none": 0.014484778521220477,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.01308473195026204,
						"acc_stderr,none": 0.01308473195026204,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291341,
						"acc_stderr,none": 0.014236526215291341,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.024698855131686855,
						"acc_stderr,none": 0.024698855131686855,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702308,
						"acc_stderr,none": 0.013681600278702308,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577943,
						"acc_stderr,none": 0.013454070462577943,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.03089738243291862,
						"acc_stderr,none": 0.03089738243291862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247124,
						"acc_stderr,none": 0.013414729030247124,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.013211720158614753,
						"acc_stderr,none": 0.013211720158614753,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48081561061170797,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.0403106549260379,
						"alias": "kobest",
						"f1,none": 0.3828631000929086,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5028490028490028,
						"acc_stderr,none": 0.013348550797680823,
						"alias": " - kobest_boolq",
						"f1,none": 0.33586879913255624,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.489,
						"acc_stderr,none": 0.015815471195292686,
						"alias": " - kobest_copa",
						"f1,none": 0.48837325724011915,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.342,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.022242244375731027,
						"acc_stderr,none": 0.02123614719989925,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3402686942810196,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5340050377833753,
						"acc_stderr,none": 0.02506776963066191,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5110609601033199,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5606442848825927,
						"acc_stderr,none": 0.025309228284552565,
						"alias": "lambada",
						"perplexity,none": 7.97760922995255,
						"perplexity_stderr,none": 1.0328407259597316
					},
					"lambada_cloze": {
						"acc,none": 0.024451775664661363,
						"acc_stderr,none": 0.004101598453044792,
						"alias": "lambada_cloze",
						"perplexity,none": 706.2427173090024,
						"perplexity_stderr,none": 67.11171869741892
					},
					"lambada_multilingual": {
						"acc,none": 0.38001164370269747,
						"acc_stderr,none": 0.06678689076293869,
						"alias": "lambada_multilingual",
						"perplexity,none": 97.5692210616895,
						"perplexity_stderr,none": 28.905811055132286
					},
					"lambada_openai": {
						"acc,none": 0.609935959635164,
						"acc_stderr,none": 0.00679551146587919,
						"alias": " - lambada_openai",
						"perplexity,none": 5.963786625754829,
						"perplexity_stderr,none": 0.15922492829117155
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03143799728313604,
						"acc_stderr,none": 0.0024311022348859155,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 582.6978397398414,
						"perplexity_stderr,none": 24.45828176873394
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2910925674364448,
						"acc_stderr,none": 0.0063288149295274675,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 135.22381288027228,
						"perplexity_stderr,none": 8.321972906565247
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.609353774500291,
						"acc_stderr,none": 0.006797334493142837,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.965256058178062,
						"perplexity_stderr,none": 0.1592907180126647
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3021540849990297,
						"acc_stderr,none": 0.00639743788967891,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 131.3396867226627,
						"perplexity_stderr,none": 7.695906279565065
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.36774694352804194,
						"acc_stderr,none": 0.006717877457481597,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 87.03418589077059,
						"perplexity_stderr,none": 5.1130952200542605
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.3297108480496798,
						"acc_stderr,none": 0.006549524731584283,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 128.28316375656385,
						"perplexity_stderr,none": 8.052571004025129
					},
					"lambada_standard": {
						"acc,none": 0.5119347952648943,
						"acc_stderr,none": 0.006963992915953921,
						"alias": " - lambada_standard",
						"perplexity,none": 9.989593521017243,
						"perplexity_stderr,none": 0.2896866873758146
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.017465554046186688,
						"acc_stderr,none": 0.0018250600085879187,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 829.7875948781635,
						"perplexity_stderr,none": 27.88337421013344
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23791348600508905,
						"exact_match_stderr,get-answer": 0.010742950531023867
					},
					"logiqa": {
						"acc,none": 0.22734254992319508,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825194,
						"acc_stderr,none": 0.01643906767511775,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2385496183206107,
						"acc_norm,none": 0.26590330788804073,
						"acc_norm_stderr,none": 0.011146805188415496,
						"acc_stderr,none": 0.010752812546961152,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24489112227805696,
						"acc_norm,none": 0.23986599664991626,
						"acc_norm_stderr,none": 0.007816818250028125,
						"acc_stderr,none": 0.007872123512006534,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.48093624232154203,
						"acc_stderr,none": 0.005142154335176635,
						"alias": "mc_taco",
						"f1,none": 0.4994382596261873,
						"f1_stderr,none": 0.006198780014805503
					},
					"medmcqa": {
						"acc,none": 0.2596222806598135,
						"acc_norm,none": 0.2596222806598135,
						"acc_norm_stderr,none": 0.006779624437908077,
						"acc_stderr,none": 0.006779624437908077,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.23880597014925373,
						"acc_norm,none": 0.23880597014925373,
						"acc_norm_stderr,none": 0.011954370755725675,
						"acc_stderr,none": 0.011954370755725675,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24747187010397378,
						"acc_stderr,none": 0.03738317829624497,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.03785714465066653,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19078947368421054,
						"acc_stderr,none": 0.031975658210325,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2641509433962264,
						"acc_stderr,none": 0.02713429162874171,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03476590104304134,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.21965317919075145,
						"acc_stderr,none": 0.031568093627031744,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.04158307533083286,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.31063829787234043,
						"acc_stderr,none": 0.030251237579213174,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748141,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.022569897074918435,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.038522733649243183,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.25161290322580643,
						"acc_stderr,none": 0.024685979286239956,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.03031509928561773,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23030303030303031,
						"acc_stderr,none": 0.03287666758603488,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2474747474747475,
						"acc_stderr,none": 0.030746300742124498,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.19689119170984457,
						"acc_stderr,none": 0.02869787397186067,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.21794871794871795,
						"acc_stderr,none": 0.02093244577446319,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.0263357394040558,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2184873949579832,
						"acc_stderr,none": 0.026841514322958948,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24220183486238533,
						"acc_stderr,none": 0.018368176306598618,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.17592592592592593,
						"acc_stderr,none": 0.02596742095825853,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.028609516716994934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008732,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.24427480916030533,
						"acc_stderr,none": 0.037683359597287434,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2499468650371945,
						"acc_stderr,none": 0.02570536381708068,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.24793388429752067,
						"acc_stderr,none": 0.03941897526516304,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052191,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2822085889570552,
						"acc_stderr,none": 0.03536117886664742,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.25213675213675213,
						"acc_stderr,none": 0.02844796547623102,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2515964240102171,
						"acc_stderr,none": 0.015517322365529638,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.023267528432100174,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.023929155517351298,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2568393949147087,
						"acc_stderr,none": 0.04659847936311328,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2508038585209003,
						"acc_stderr,none": 0.024619771956697165,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2808641975308642,
						"acc_stderr,none": 0.025006469755799215,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.0258921511567094,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24641460234680573,
						"acc_stderr,none": 0.011005971399927225,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.19117647058823528,
						"acc_stderr,none": 0.023886881922440355,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.017555818091322277,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.3181818181818182,
						"acc_stderr,none": 0.044612721759105085,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866767,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23626909327266818,
						"acc_stderr,none": 0.0327684387917673,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.24378109452736318,
						"acc_stderr,none": 0.03036049015401466,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24548049476688868,
						"acc_stderr,none": 0.044201259805433206,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2891566265060241,
						"acc_stderr,none": 0.035294868015111155,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03218093795602357,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.38858889454915946,
						"acc_stderr,none": 0.004920268772266655,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3896460537021969,
						"acc_stderr,none": 0.0049184376629680705,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6740196078431373,
						"acc_stderr,none": 0.023234578573523592,
						"alias": "mrpc",
						"f1,none": 0.802962962962963,
						"f1_stderr,none": 0.016725995378431387
					},
					"multimedqa": {
						"acc,none": 0.27622427253371185,
						"acc_norm,none": 0.25152089258184174,
						"acc_norm_stderr,none": 9.418663501048668e-05,
						"acc_stderr,none": 0.08723589266520539,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5691006600660066,
						"acc_stderr,none": 0.007112887654223404,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6751316796912565,
						"mrr_stderr,none": 0.010404390272329823,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43905191873589167,
						"r@2_stderr,none": 0.016681981598282936
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6409894675689142,
						"mrr_stderr,none": 0.010551473712480663,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403393
					},
					"openbookqa": {
						"acc,none": 0.254,
						"acc_norm,none": 0.37,
						"acc_norm_stderr,none": 0.021613289165165785,
						"acc_stderr,none": 0.019486596801643375,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.427,
						"acc_stderr,none": 0.011063304133448202,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.011039063840074269,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4185,
						"acc_stderr,none": 0.011033573531383047,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.011169148353274969,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.559,
						"acc_stderr,none": 0.011105006104468736,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4955,
						"acc_stderr,none": 0.011182683094883903,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5335,
						"acc_stderr,none": 0.011158007239770808,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48264285714285715,
						"acc_stderr,none": 0.043170460826796556,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7437431991294886,
						"acc_norm,none": 0.7442872687704026,
						"acc_norm_stderr,none": 0.010178690109459858,
						"acc_stderr,none": 0.01018578783156507,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2568317677198975,
						"acc_norm,none": 0.29339239965841163,
						"acc_norm_stderr,none": 0.0033264939132763003,
						"acc_stderr,none": 0.0031918398325104904,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.02185468495561126,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7041466981903363,
						"acc_norm,none": 0.4808648942082854,
						"acc_norm_stderr,none": 0.003947831490021625,
						"acc_stderr,none": 0.14699322403394063,
						"alias": "pythia",
						"bits_per_byte,none": 0.7285090329011088,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6569258405092875,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.963786625754829,
						"perplexity_stderr,none": 0.15922492829117155,
						"word_perplexity,none": 14.883749336326465,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3670212765957447,
						"acc_norm,none": 0.41312056737588654,
						"acc_norm_stderr,none": 0.04665636856731557,
						"acc_stderr,none": 0.03554258710476385,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.36666666666666664,
						"acc_norm,none": 0.48333333333333334,
						"acc_norm_stderr,none": 0.04580945392704764,
						"acc_stderr,none": 0.044175188121443124,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.3767605633802817,
						"acc_norm_stderr,none": 0.028804939288711223,
						"acc_stderr,none": 0.028857363751758312,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.504484715357862,
						"acc_stderr,none": 0.006765138405338173,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5333168439277765,
						"acc_stderr,none": 0.002481173954662773,
						"alias": "qqp",
						"f1,none": 0.3680755576394936,
						"f1_stderr,none": 0.0035508418317945728
					},
					"race": {
						"acc,none": 0.3837320574162679,
						"acc_stderr,none": 0.015050418634703647,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795994,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.886,
						"acc_norm,none": 0.787,
						"acc_norm_stderr,none": 0.012953717566737225,
						"acc_stderr,none": 0.010055103435823333,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795994,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5814220183486238,
						"acc_stderr,none": 0.016715710826534454,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5271918424472658,
						"acc_norm,none": 0.701889433170049,
						"acc_norm_stderr,none": 0.0032341023459712395,
						"acc_stderr,none": 0.0035298605080669753,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5029117167481948,
						"acc_stderr,none": 0.011040713804904616,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5022035256410257,
						"acc_stderr,none": 0.005004206829624937,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.47917300091213133,
						"acc_stderr,none": 0.005029471586799785,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5265686274509804,
						"acc_stderr,none": 0.004943985760403929,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.30469795992087373,
						"acc_stderr,none": 0.001552805516278786,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -5.725790667225637,
						"bleu_diff_stderr,none": 0.6965950873789364,
						"bleu_max,none": 21.34215767891346,
						"bleu_max_stderr,none": 0.7251541501163864,
						"rouge1_acc,none": 0.2802937576499388,
						"rouge1_acc_stderr,none": 0.015723139524608753,
						"rouge1_diff,none": -8.51446913870341,
						"rouge1_diff_stderr,none": 0.7821992957195486,
						"rouge1_max,none": 45.09414157085349,
						"rouge1_max_stderr,none": 0.8775283766073789,
						"rouge2_acc,none": 0.22766217870257038,
						"rouge2_acc_stderr,none": 0.014679255032111066,
						"rouge2_diff,none": -9.266174219776689,
						"rouge2_diff_stderr,none": 0.9113877449872622,
						"rouge2_max,none": 28.42656401759116,
						"rouge2_max_stderr,none": 0.9559408372967046,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394805,
						"rougeL_diff,none": -8.446714023463292,
						"rougeL_diff_stderr,none": 0.7898948550543593,
						"rougeL_max,none": 42.18826384118312,
						"rougeL_max_stderr,none": 0.8744877426092712
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32068543451652387,
						"bleu_acc_stderr,none": 0.016339170373280906,
						"bleu_diff,none": -5.725790667225637,
						"bleu_diff_stderr,none": 0.6965950873789364,
						"bleu_max,none": 21.34215767891346,
						"bleu_max_stderr,none": 0.7251541501163864,
						"rouge1_acc,none": 0.2802937576499388,
						"rouge1_acc_stderr,none": 0.015723139524608753,
						"rouge1_diff,none": -8.51446913870341,
						"rouge1_diff_stderr,none": 0.7821992957195486,
						"rouge1_max,none": 45.09414157085349,
						"rouge1_max_stderr,none": 0.8775283766073789,
						"rouge2_acc,none": 0.22766217870257038,
						"rouge2_acc_stderr,none": 0.014679255032111066,
						"rouge2_diff,none": -9.266174219776689,
						"rouge2_diff_stderr,none": 0.9113877449872622,
						"rouge2_max,none": 28.42656401759116,
						"rouge2_max_stderr,none": 0.9559408372967046,
						"rougeL_acc,none": 0.2876376988984088,
						"rougeL_acc_stderr,none": 0.015846315101394805,
						"rougeL_diff,none": -8.446714023463292,
						"rougeL_diff_stderr,none": 0.7898948550543593,
						"rougeL_max,none": 42.18826384118312,
						"rougeL_max_stderr,none": 0.8744877426092712
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23133414932680538,
						"acc_stderr,none": 0.014761945174862665,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.37806177051494205,
						"acc_stderr,none": 0.013970563918361614,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.02214566929133858,
						"exact_match_stderr,none": 0.0032653258401622418
					},
					"wic": {
						"acc,none": 0.49843260188087773,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7285090329011088,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6569258405092875,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.883749336326465,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6022099447513812,
						"acc_stderr,none": 0.01375574351374902,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.6056338028169014,
						"acc_stderr,none": 0.058412510854444266,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7435897435897436,
						"acc_stderr,none": 0.026475851706699714,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5289090909090909,
						"acc_stderr,none": 0.031702490482383716,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668086,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861926,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289666,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.0221229937781354,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.38827309236947793,
						"acc_stderr,none": 0.053150754335593575,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956477,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.009726763372837142,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894261,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.36224899598393573,
						"acc_stderr,none": 0.009634223618009004,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5622489959839357,
						"acc_stderr,none": 0.00994409973429018,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.42530120481927713,
						"acc_stderr,none": 0.00990959719222113,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.44819277108433736,
						"acc_stderr,none": 0.00996812942690988,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.009493454925438252,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42971887550200805,
						"acc_stderr,none": 0.009922572153607779,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3465863453815261,
						"acc_stderr,none": 0.009538660220458992,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293534,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3670682730923695,
						"acc_stderr,none": 0.00966138545009605,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.00944319336590334,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.36265060240963853,
						"acc_stderr,none": 0.009636527012634668,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3457831325301205,
						"acc_stderr,none": 0.00953345503375277,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5399795439504242,
						"acc_stderr,none": 0.06000317291976968,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4877564526803441,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7240238252812706,
						"acc_stderr,none": 0.011503334549850868,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.012664648329214082,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4990072799470549,
						"acc_stderr,none": 0.01286709995542293,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163341,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5168762409000662,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.01286441704798048,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828153,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5049636002647253,
						"acc_stderr,none": 0.012866491277589945,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5221707478491066,
						"acc_stderr,none": 0.012854469625936093,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5585704831237591,
						"acc_stderr,none": 0.012778538985880637,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7125196673409755,
						"acc_stderr,none": 0.07442833285970002,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8159139784946237,
						"acc_stderr,none": 0.008039231425138252,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6144578313253012,
						"acc_stderr,none": 0.05374957797319389,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5557872784150156,
						"acc_stderr,none": 0.016053400564808713,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6273764258555133,
						"acc_stderr,none": 0.029870921174577802,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5746031746031746,
						"acc_stderr,none": 0.027900777694976245,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6805555555555556,
						"acc_stderr,none": 0.020789568197560088,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
	},
	"TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.49492671927846676,
						"acc_norm,none": 0.4704058624577227,
						"acc_norm_stderr,none": 0.0403058921311755,
						"acc_stderr,none": 0.051708835620998976,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3384375,
						"acc_stderr,none": 0.015831972244826506,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.18299999999999997,
						"acc_stderr,none": 0.13292976138069018,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8234925373134329,
						"acc_stderr,none": 0.15343640233053038,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24219910846953938,
						"acc_norm,none": 0.24219910846953938,
						"acc_norm_stderr,none": 0.11818713194395429,
						"acc_stderr,none": 0.11818713194395429,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2484026938352617,
						"acc_norm,none": 0.2484026938352617,
						"acc_norm_stderr,none": 0.04119090717733295,
						"acc_stderr,none": 0.04119090717733295,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.625256220784014,
						"likelihood_diff_stderr,none": 0.44309650974433157,
						"pct_stereotype,none": 0.5794573643410855,
						"pct_stereotype_stderr,none": 0.08054252813486378
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03740157480314961,
						"exact_match_stderr,none": 0.004210295288134857
					},
					"glue": {
						"acc,none": 0.49324567927370816,
						"acc_stderr,none": 0.06809830262727767,
						"alias": "glue",
						"f1,none": 0.2796921257527253,
						"f1_stderr,none": 0.002719250101006837,
						"mcc,none": 0.028854402929441463,
						"mcc_stderr,none": 0.0009865248005232303
					},
					"kmmlu": {
						"acc,none": 0.27788045047646553,
						"acc_norm,none": 0.27788045047646553,
						"acc_norm_stderr,none": 0.026141445923227077,
						"acc_stderr,none": 0.026141445923227077,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.47358035518526637,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.03828026577445662,
						"alias": "kobest",
						"f1,none": 0.3660568385894482,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5404618668736658,
						"acc_stderr,none": 0.0243857089340211,
						"alias": "lambada",
						"perplexity,none": 8.804460304577901,
						"perplexity_stderr,none": 0.9744465362542664
					},
					"lambada_cloze": {
						"acc,none": 0.009994178148651271,
						"acc_stderr,none": 0.0013865937441478095,
						"alias": "lambada_cloze",
						"perplexity,none": 731.4224090223846,
						"perplexity_stderr,none": 42.651111003300116
					},
					"lambada_multilingual": {
						"acc,none": 0.36669901028527074,
						"acc_stderr,none": 0.07949610928763419,
						"alias": "lambada_multilingual",
						"perplexity,none": 102.95955655144742,
						"perplexity_stderr,none": 35.76213516118316
					},
					"mmlu": {
						"acc,none": 0.25544794188861986,
						"acc_stderr,none": 0.03446292252527155,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2546227417640808,
						"acc_stderr,none": 0.02672934620343306,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2603797875764403,
						"acc_stderr,none": 0.03438679140961325,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24504387390315246,
						"acc_stderr,none": 0.034551866666141795,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26197272438947034,
						"acc_stderr,none": 0.04252822904603301,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.29041873669268986,
						"acc_norm,none": 0.2621056592314852,
						"acc_norm_stderr,none": 0.00011227080363858773,
						"acc_stderr,none": 0.08545187109541512,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48114285714285715,
						"acc_stderr,none": 0.041467510659430655,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7055008765768597,
						"acc_norm,none": 0.47816743572035797,
						"acc_norm_stderr,none": 0.004496647221307374,
						"acc_stderr,none": 0.1509785421878474,
						"alias": "pythia",
						"bits_per_byte,none": 0.7202951006143281,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6475189970488797,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.91937435049955,
						"perplexity_stderr,none": 0.1766649109710932,
						"word_perplexity,none": 14.43743449971529,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.3599290780141844,
						"acc_norm_stderr,none": 0.042382067538495355,
						"acc_stderr,none": 0.035435409573252426,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.48673921000965026,
						"acc_stderr,none": 0.01603379529861715,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2969838170591314,
						"acc_stderr,none": 0.0017656547847256618,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3292533659730722,
						"bleu_acc_stderr,none": 0.01645126444006823,
						"bleu_diff,none": -4.529088243896841,
						"bleu_diff_stderr,none": 0.7712398079275519,
						"bleu_max,none": 21.338292513074606,
						"bleu_max_stderr,none": 0.7277940463151109,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -7.482931401402743,
						"rouge1_diff_stderr,none": 0.9496821286941034,
						"rouge1_max,none": 43.84161496308325,
						"rouge1_max_stderr,none": 0.9158903747113558,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728348,
						"rouge2_diff,none": -8.44885859371253,
						"rouge2_diff_stderr,none": 1.0545426263051483,
						"rouge2_max,none": 27.212662956414054,
						"rouge2_max_stderr,none": 1.000997097911372,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.01572313952460876,
						"rougeL_diff,none": -7.719185136012378,
						"rougeL_diff_stderr,none": 0.9527244242978985,
						"rougeL_max,none": 41.177249485889,
						"rougeL_max_stderr,none": 0.9104080216212086
					},
					"xcopa": {
						"acc,none": 0.5292727272727272,
						"acc_stderr,none": 0.02931613977250575,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3789558232931727,
						"acc_stderr,none": 0.04963845077146467,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5314361350099271,
						"acc_stderr,none": 0.052252798585811375,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7084738143403012,
						"acc_stderr,none": 0.05612959809115702,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.49492671927846676,
						"acc_norm,none": 0.4704058624577227,
						"acc_norm_stderr,none": 0.0403058921311755,
						"acc_stderr,none": 0.051708835620998976,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3384375,
						"acc_stderr,none": 0.015831972244826506,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.341,
						"acc_stderr,none": 0.0149981313484027,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.325,
						"acc_stderr,none": 0.014818724459095524,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3475,
						"acc_stderr,none": 0.013751753243291852,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2773037542662116,
						"acc_norm,none": 0.3037542662116041,
						"acc_norm_stderr,none": 0.013438909184778766,
						"acc_stderr,none": 0.013082095839059374,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6022727272727273,
						"acc_norm,none": 0.5526094276094277,
						"acc_norm_stderr,none": 0.010202832385415646,
						"acc_stderr,none": 0.01004286160217806,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.18299999999999997,
						"acc_stderr,none": 0.13292976138069018,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.073,
						"acc_stderr,none": 0.0058182837858862906,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.4775,
						"acc_stderr,none": 0.011171807357801173,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.107,
						"acc_stderr,none": 0.006913710993370312,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.4145,
						"acc_stderr,none": 0.011018419931591767,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.010249513464703064,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.2465,
						"acc_stderr,none": 0.009639259964661761,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.094,
						"acc_stderr,none": 0.006527120471603565,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.084,
						"acc_stderr,none": 0.006204131335071217,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0115,
						"acc_stderr,none": 0.0023846841214675827,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.022,
						"acc_stderr,none": 0.0032807593162018905,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0034707158351409977,
						"acc_stderr,none": 0.001225217874391227,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8234925373134329,
						"acc_stderr,none": 0.15343640233053038,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426605,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.00199699473909873,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096933,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937822,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247124,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.015817749561843567,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262033,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122353,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426665,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274703,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817152,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697053,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.0112972398234093,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621268,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695459,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946095,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706618,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499332,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731973,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942323,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230173,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262416,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866447,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274538,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.014987482264363935,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386705,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275289,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832351,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919302,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651526,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632163,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750634,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.01538010232565271,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.014470846741134708,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.00539714082909919,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271298,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397254,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491125,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888916,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286412,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.01260773393417531,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434935,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.015100563798316405,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.00589395781616557,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024952,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763892,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.443,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.01001655286669684,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.0092769101031033,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.0144568322948011,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.01053479862085575,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837956,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812189,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139978,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724425,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452369,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295442,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.01545972195749338,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361425,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.57217125382263,
						"acc_stderr,none": 0.00865347489463719,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.16071428571428573,
						"acc_stderr,none": 0.04952230059306298,
						"alias": "cb",
						"f1,none": 0.15256008359456635,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24219910846953938,
						"acc_norm,none": 0.24219910846953938,
						"acc_norm_stderr,none": 0.11818713194395429,
						"acc_stderr,none": 0.11818713194395429,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.14893617021276595,
						"acc_norm,none": 0.14893617021276595,
						"acc_norm_stderr,none": 0.05249310253140093,
						"acc_stderr,none": 0.05249310253140093,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557245,
						"acc_stderr,none": 0.05781449705557245,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.07956541321016082,
						"acc_stderr,none": 0.07956541321016082,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031766,
						"acc_stderr,none": 0.07633651333031766,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.0762215933966706,
						"acc_stderr,none": 0.0762215933966706,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.06007385040937024,
						"acc_stderr,none": 0.06007385040937024,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.07228658768525041,
						"acc_stderr,none": 0.07228658768525041,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.06521739130434782,
						"acc_norm,none": 0.06521739130434782,
						"acc_norm_stderr,none": 0.03680702927304433,
						"acc_stderr,none": 0.03680702927304433,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2484026938352617,
						"acc_norm,none": 0.2484026938352617,
						"acc_norm_stderr,none": 0.04119090717733295,
						"acc_stderr,none": 0.04119090717733295,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22972972972972974,
						"acc_norm,none": 0.22972972972972974,
						"acc_norm_stderr,none": 0.03469536825407608,
						"acc_stderr,none": 0.03469536825407608,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.18125,
						"acc_norm,none": 0.18125,
						"acc_norm_stderr,none": 0.030550343799854465,
						"acc_stderr,none": 0.030550343799854465,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.031234752377721175,
						"acc_stderr,none": 0.031234752377721175,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2822966507177033,
						"acc_norm,none": 0.2822966507177033,
						"acc_norm_stderr,none": 0.031209993754410442,
						"acc_stderr,none": 0.031209993754410442,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306085,
						"acc_stderr,none": 0.03807387116306085,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22058823529411764,
						"acc_norm,none": 0.22058823529411764,
						"acc_norm_stderr,none": 0.03568681318274767,
						"acc_stderr,none": 0.03568681318274767,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.024539600216850282,
						"acc_stderr,none": 0.024539600216850282,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.21787709497206703,
						"acc_norm,none": 0.21787709497206703,
						"acc_norm_stderr,none": 0.030940924724402182,
						"acc_stderr,none": 0.030940924724402182,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.26582278481012656,
						"acc_norm,none": 0.26582278481012656,
						"acc_norm_stderr,none": 0.028756799629658335,
						"acc_stderr,none": 0.028756799629658335,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.043172737765666686,
						"acc_stderr,none": 0.043172737765666686,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.04236511258094631,
						"acc_stderr,none": 0.04236511258094631,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633094,
						"acc_stderr,none": 0.042520162237633094,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.22058823529411764,
						"acc_norm,none": 0.22058823529411764,
						"acc_norm_stderr,none": 0.02910225438967409,
						"acc_stderr,none": 0.02910225438967409,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2789115646258503,
						"acc_norm,none": 0.2789115646258503,
						"acc_norm_stderr,none": 0.03711513959675178,
						"acc_stderr,none": 0.03711513959675178,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.20863309352517986,
						"acc_norm,none": 0.20863309352517986,
						"acc_norm_stderr,none": 0.03458923827478227,
						"acc_stderr,none": 0.03458923827478227,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.0342292401764445,
						"acc_stderr,none": 0.0342292401764445,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.033519538795212696,
						"acc_stderr,none": 0.033519538795212696,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.22674418604651161,
						"acc_norm,none": 0.22674418604651161,
						"acc_norm_stderr,none": 0.03202075899584939,
						"acc_stderr,none": 0.03202075899584939,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02688368747322085,
						"acc_stderr,none": 0.02688368747322085,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.21717171717171718,
						"acc_norm,none": 0.21717171717171718,
						"acc_norm_stderr,none": 0.029376616484945627,
						"acc_stderr,none": 0.029376616484945627,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.25630252100840334,
						"acc_norm,none": 0.25630252100840334,
						"acc_norm_stderr,none": 0.02835962087053395,
						"acc_stderr,none": 0.02835962087053395,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.28695652173913044,
						"acc_norm,none": 0.28695652173913044,
						"acc_norm_stderr,none": 0.029891541673635467,
						"acc_stderr,none": 0.029891541673635467,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066655,
						"acc_stderr,none": 0.03785714465066655,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.0329769292543446,
						"acc_stderr,none": 0.0329769292543446,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2684563758389262,
						"acc_norm,none": 0.2684563758389262,
						"acc_norm_stderr,none": 0.036427227538629016,
						"acc_stderr,none": 0.036427227538629016,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.21301775147928995,
						"acc_norm,none": 0.21301775147928995,
						"acc_norm_stderr,none": 0.0315889889113352,
						"acc_stderr,none": 0.0315889889113352,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.035717915564682706,
						"acc_stderr,none": 0.035717915564682706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.22377622377622378,
						"acc_norm,none": 0.22377622377622378,
						"acc_norm_stderr,none": 0.03497488288382342,
						"acc_stderr,none": 0.03497488288382342,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928704,
						"acc_stderr,none": 0.039325376803928704,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593335,
						"acc_stderr,none": 0.03253020905593335,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.21511627906976744,
						"acc_norm,none": 0.21511627906976744,
						"acc_norm_stderr,none": 0.03142253684735938,
						"acc_stderr,none": 0.03142253684735938,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.21654501216545013,
						"acc_norm,none": 0.21654501216545013,
						"acc_norm_stderr,none": 0.020341791049505637,
						"acc_stderr,none": 0.020341791049505637,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435978,
						"acc_stderr,none": 0.029761395837435978,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.21138211382113822,
						"acc_norm,none": 0.21138211382113822,
						"acc_norm_stderr,none": 0.03696472795695268,
						"acc_stderr,none": 0.03696472795695268,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.04178859878631876,
						"acc_stderr,none": 0.04178859878631876,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.02904595687156657,
						"acc_stderr,none": 0.02904595687156657,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2751322751322751,
						"acc_norm,none": 0.2751322751322751,
						"acc_norm_stderr,none": 0.032570260086303135,
						"acc_stderr,none": 0.032570260086303135,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.042071607555840204,
						"acc_stderr,none": 0.042071607555840204,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135302,
						"acc_stderr,none": 0.03565998174135302,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.033370375852212746,
						"acc_stderr,none": 0.033370375852212746,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776725,
						"acc_stderr,none": 0.03011304016776725,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.030200390075231464,
						"acc_stderr,none": 0.030200390075231464,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.22988505747126436,
						"acc_norm,none": 0.22988505747126436,
						"acc_norm_stderr,none": 0.03198969467577206,
						"acc_stderr,none": 0.03198969467577206,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.17777777777777778,
						"acc_norm,none": 0.17777777777777778,
						"acc_norm_stderr,none": 0.03302789859901717,
						"acc_stderr,none": 0.03302789859901717,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26991150442477874,
						"acc_norm,none": 0.26991150442477874,
						"acc_norm_stderr,none": 0.029594239995417392,
						"acc_stderr,none": 0.029594239995417392,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03273943999002354,
						"acc_stderr,none": 0.03273943999002354,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.22981366459627328,
						"acc_norm,none": 0.22981366459627328,
						"acc_norm_stderr,none": 0.0332602751192305,
						"acc_stderr,none": 0.0332602751192305,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.028854402929441463,
						"mcc_stderr,none": 0.0314089923512874
					},
					"copa": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.04163331998932261,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.625256220784014,
						"likelihood_diff_stderr,none": 0.44309650974433157,
						"pct_stereotype,none": 0.5794573643410855,
						"pct_stereotype_stderr,none": 0.08054252813486378
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.484492391223487,
						"likelihood_diff_stderr,none": 0.08521316825022465,
						"pct_stereotype,none": 0.6428145497912939,
						"pct_stereotype_stderr,none": 0.011704496116299284
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.707845017150208,
						"likelihood_diff_stderr,none": 0.3674119940680855,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.8722218600186435,
						"likelihood_diff_stderr,none": 1.7368742534632615,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.838085233248197,
						"likelihood_diff_stderr,none": 0.6547984603159045,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.8009320557117463,
						"likelihood_diff_stderr,none": 0.18712616496117432,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.02681271031002423
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.1854864667963096,
						"likelihood_diff_stderr,none": 0.20672239986451674,
						"pct_stereotype,none": 0.5925925925925926,
						"pct_stereotype_stderr,none": 0.033509916046960436
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.2665699587927923,
						"likelihood_diff_stderr,none": 0.2953736895642618,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3215710196908064,
						"likelihood_diff_stderr,none": 0.14422453836774976,
						"pct_stereotype,none": 0.5413385826771654,
						"pct_stereotype_stderr,none": 0.02212975549054906
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6912048099277257,
						"likelihood_diff_stderr,none": 0.33808322755488207,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.04188770861432396
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.645900869882235,
						"likelihood_diff_stderr,none": 0.44586681622875785,
						"pct_stereotype,none": 0.8494623655913979,
						"pct_stereotype_stderr,none": 0.03728212869390004
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.810248455248381,
						"likelihood_diff_stderr,none": 0.23520837606907663,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.7650829035395588,
						"likelihood_diff_stderr,none": 0.09009004154257412,
						"pct_stereotype,none": 0.5163983303518187,
						"pct_stereotype_stderr,none": 0.012206729011137946
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.745386505126953,
						"likelihood_diff_stderr,none": 0.3494063038748805,
						"pct_stereotype,none": 0.4111111111111111,
						"pct_stereotype_stderr,none": 0.052155640611075554
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.1304071866548977,
						"likelihood_diff_stderr,none": 0.8135156788886825,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.173840436068448,
						"likelihood_diff_stderr,none": 0.45694789133985425,
						"pct_stereotype,none": 0.5757575757575758,
						"pct_stereotype_stderr,none": 0.06130137276858363
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.645669729167427,
						"likelihood_diff_stderr,none": 0.18335184963441503,
						"pct_stereotype,none": 0.4797507788161994,
						"pct_stereotype_stderr,none": 0.027927918885132307
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.227144731363289,
						"likelihood_diff_stderr,none": 0.23442560609227314,
						"pct_stereotype,none": 0.3794466403162055,
						"pct_stereotype_stderr,none": 0.030567832939072923
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.812292628818088,
						"likelihood_diff_stderr,none": 0.5411848959194571,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.3008813526319423,
						"likelihood_diff_stderr,none": 0.1780765357206828,
						"pct_stereotype,none": 0.5217391304347826,
						"pct_stereotype_stderr,none": 0.023315932363473745
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.3669911923615827,
						"likelihood_diff_stderr,none": 0.32923709095365683,
						"pct_stereotype,none": 0.5043478260869565,
						"pct_stereotype_stderr,none": 0.04682752006203915
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.604767055301876,
						"likelihood_diff_stderr,none": 0.3787779721219335,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.045130821483550014
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.9409871587947922,
						"likelihood_diff_stderr,none": 0.2630075831888337,
						"pct_stereotype,none": 0.6428571428571429,
						"pct_stereotype_stderr,none": 0.03431317581537577
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03740157480314961,
						"exact_match_stderr,none": 0.004210295288134857
					},
					"glue": {
						"acc,none": 0.49324567927370816,
						"acc_stderr,none": 0.06809830262727767,
						"alias": "glue",
						"f1,none": 0.2796921257527253,
						"f1_stderr,none": 0.002719250101006837,
						"mcc,none": 0.028854402929441463,
						"mcc_stderr,none": 0.0009865248005232303
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.01061410159211524,
						"exact_match_stderr,get-answer": 0.002822713322387704
					},
					"hellaswag": {
						"acc,none": 0.4501095399322844,
						"acc_norm,none": 0.5914160525791675,
						"acc_norm_stderr,none": 0.0049056744086140225,
						"acc_stderr,none": 0.004964879563513312,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.27788045047646553,
						"acc_norm,none": 0.27788045047646553,
						"acc_norm_stderr,none": 0.026141445923227077,
						"acc_stderr,none": 0.026141445923227077,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.313,
						"acc_norm,none": 0.313,
						"acc_norm_stderr,none": 0.014671272822977881,
						"acc_stderr,none": 0.014671272822977881,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575023,
						"acc_stderr,none": 0.014442734941575023,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.014267009061031307,
						"acc_stderr,none": 0.014267009061031307,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.018032386001530083,
						"acc_stderr,none": 0.018032386001530083,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220478,
						"acc_stderr,none": 0.014484778521220478,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722692,
						"acc_stderr,none": 0.014645596385722692,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.014606483127342763,
						"acc_stderr,none": 0.014606483127342763,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23846153846153847,
						"acc_norm,none": 0.23846153846153847,
						"acc_norm_stderr,none": 0.03751977598816765,
						"acc_stderr,none": 0.03751977598816765,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.04560480215720684,
						"acc_stderr,none": 0.04560480215720684,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.01377220656516854,
						"acc_stderr,none": 0.01377220656516854,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.01396316475480995,
						"acc_stderr,none": 0.01396316475480995,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.01444273494157502,
						"acc_stderr,none": 0.01444273494157502,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434935,
						"acc_stderr,none": 0.014221154708434935,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021355,
						"acc_stderr,none": 0.013912208651021355,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.01449862787336143,
						"acc_stderr,none": 0.01449862787336143,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.01447084674113471,
						"acc_stderr,none": 0.01447084674113471,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445517,
						"acc_stderr,none": 0.014428554438445517,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220463,
						"acc_stderr,none": 0.014484778521220463,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220482,
						"acc_stderr,none": 0.014484778521220482,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881424,
						"acc_stderr,none": 0.013588548437881424,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008215,
						"acc_stderr,none": 0.014414290540008215,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2783333333333333,
						"acc_norm,none": 0.2783333333333333,
						"acc_norm_stderr,none": 0.018312073472792113,
						"acc_stderr,none": 0.018312073472792113,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.01353152253451544,
						"acc_stderr,none": 0.01353152253451544,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087962,
						"acc_stderr,none": 0.014683991951087962,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259726,
						"acc_stderr,none": 0.013929286594259726,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441275,
						"acc_stderr,none": 0.014399942998441275,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403325,
						"acc_stderr,none": 0.04093601807403325,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.25333333333333335,
						"acc_norm,none": 0.25333333333333335,
						"acc_norm_stderr,none": 0.025152082937711918,
						"acc_stderr,none": 0.025152082937711918,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314137,
						"acc_stderr,none": 0.013644675781314137,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296174,
						"acc_stderr,none": 0.014341711358296174,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440469,
						"acc_stderr,none": 0.013946271849440469,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.030488073292114205,
						"acc_stderr,none": 0.030488073292114205,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515427,
						"acc_stderr,none": 0.013531522534515427,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021352,
						"acc_stderr,none": 0.013912208651021352,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.030897382432918605,
						"acc_stderr,none": 0.030897382432918605,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021349,
						"acc_stderr,none": 0.013912208651021349,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.47358035518526637,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.03828026577445662,
						"alias": "kobest",
						"f1,none": 0.3660568385894482,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.01579621855130262,
						"alias": " - kobest_copa",
						"f1,none": 0.47249306583626866,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.352,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.022318338119870523,
						"acc_stderr,none": 0.021380042385946034,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.34917519209343384,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4811083123425693,
						"acc_stderr,none": 0.02510800428419159,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3523756731073804,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5404618668736658,
						"acc_stderr,none": 0.0243857089340211,
						"alias": "lambada",
						"perplexity,none": 8.804460304577901,
						"perplexity_stderr,none": 0.9744465362542664
					},
					"lambada_cloze": {
						"acc,none": 0.009994178148651271,
						"acc_stderr,none": 0.0013865937441478095,
						"alias": "lambada_cloze",
						"perplexity,none": 731.4224090223846,
						"perplexity_stderr,none": 42.651111003300116
					},
					"lambada_multilingual": {
						"acc,none": 0.36669901028527074,
						"acc_stderr,none": 0.07949610928763419,
						"alias": "lambada_multilingual",
						"perplexity,none": 102.95955655144742,
						"perplexity_stderr,none": 35.76213516118316
					},
					"lambada_openai": {
						"acc,none": 0.5872307393751213,
						"acc_stderr,none": 0.006859147422201016,
						"alias": " - lambada_openai",
						"perplexity,none": 6.91937435049955,
						"perplexity_stderr,none": 0.1766649109710932
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.01009120900446342,
						"acc_stderr,none": 0.001392455304523369,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 664.085389024031,
						"perplexity_stderr,none": 24.925771198295397
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2800310498738599,
						"acc_stderr,none": 0.0062556443609290075,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 136.47301285021462,
						"perplexity_stderr,none": 8.17461187962502
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5874248010867456,
						"acc_stderr,none": 0.006858667841807084,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.919925752005728,
						"perplexity_stderr,none": 0.17668090153855184
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2955559868038036,
						"acc_stderr,none": 0.006357043665649022,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 139.62434189917408,
						"perplexity_stderr,none": 7.861985116213254
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.3471764020958665,
						"acc_stderr,none": 0.006632619664862147,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 94.55418880614887,
						"perplexity_stderr,none": 5.4134190378935
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.323306811566078,
						"acc_stderr,none": 0.006516515049707138,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 137.22631344969375,
						"perplexity_stderr,none": 8.384348106946247
					},
					"lambada_standard": {
						"acc,none": 0.49369299437221037,
						"acc_stderr,none": 0.006965423445368989,
						"alias": " - lambada_standard",
						"perplexity,none": 10.689004026190204,
						"perplexity_stderr,none": 0.30326234332085994
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.009897147292839123,
						"acc_stderr,none": 0.0013791364776453631,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 798.7594290207383,
						"perplexity_stderr,none": 27.38094918544164
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.24681933842239187,
						"exact_match_stderr,get-answer": 0.010878050728561937
					},
					"logiqa": {
						"acc,none": 0.22119815668202766,
						"acc_norm,none": 0.2626728110599078,
						"acc_norm_stderr,none": 0.017261598347857544,
						"acc_stderr,none": 0.016279743532401653,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2480916030534351,
						"acc_norm,none": 0.26908396946564883,
						"acc_norm_stderr,none": 0.011188955943255002,
						"acc_stderr,none": 0.010896835820663161,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2425460636515913,
						"acc_norm,none": 0.23953098827470687,
						"acc_norm_stderr,none": 0.00781307880281329,
						"acc_stderr,none": 0.007846497115068572,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5139800889642026,
						"acc_stderr,none": 0.0051438841582924265,
						"alias": "mc_taco",
						"f1,none": 0.42687648307730736,
						"f1_stderr,none": 0.0069405619065240325
					},
					"medmcqa": {
						"acc,none": 0.2725316758307435,
						"acc_norm,none": 0.2725316758307435,
						"acc_norm_stderr,none": 0.006885310389735159,
						"acc_stderr,none": 0.006885310389735159,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24194815396700706,
						"acc_norm,none": 0.24194815396700706,
						"acc_norm_stderr,none": 0.012007899809266111,
						"acc_stderr,none": 0.012007899809266111,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25544794188861986,
						"acc_stderr,none": 0.03446292252527155,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.03885004245800253,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.21710526315789475,
						"acc_stderr,none": 0.03355045304882923,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.27169811320754716,
						"acc_stderr,none": 0.027377706624670713,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.22916666666666666,
						"acc_stderr,none": 0.035146974678623884,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621503,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.0332055644308557,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.04158307533083286,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2936170212765957,
						"acc_stderr,none": 0.02977164271249123,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.04266339443159394,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135303,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2804232804232804,
						"acc_stderr,none": 0.02313528797432562,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.23809523809523808,
						"acc_stderr,none": 0.038095238095238106,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24838709677419354,
						"acc_stderr,none": 0.024580028921481003,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.03090379695211447,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.22424242424242424,
						"acc_stderr,none": 0.032568666616811015,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2474747474747475,
						"acc_stderr,none": 0.030746300742124515,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.02977866303775296,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2564102564102564,
						"acc_stderr,none": 0.022139081103971545,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25555555555555554,
						"acc_stderr,none": 0.026593939101844072,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.02907937453948001,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2913907284768212,
						"acc_stderr,none": 0.037101857261199966,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24220183486238533,
						"acc_stderr,none": 0.018368176306598618,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2175925925925926,
						"acc_stderr,none": 0.028139689444859676,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.030587591351604246,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.270042194092827,
						"acc_stderr,none": 0.028900721906293426,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3094170403587444,
						"acc_stderr,none": 0.031024411740572223,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22900763358778625,
						"acc_stderr,none": 0.036853466317118506,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2546227417640808,
						"acc_stderr,none": 0.02672934620343306,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2396694214876033,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04330043749650742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3067484662576687,
						"acc_stderr,none": 0.036230899157241474,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25892857142857145,
						"acc_stderr,none": 0.041577515398656284,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2815533980582524,
						"acc_stderr,none": 0.04453254836326469,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2606837606837607,
						"acc_stderr,none": 0.028760348956523414,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.04605661864718381,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.24010217113665389,
						"acc_stderr,none": 0.015274685213734188,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24566473988439305,
						"acc_stderr,none": 0.023176298203992,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.26256983240223464,
						"acc_stderr,none": 0.01471682427301776,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.025646863097137894,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2603797875764403,
						"acc_stderr,none": 0.03438679140961325,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2315112540192926,
						"acc_stderr,none": 0.023956532766639133,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.02438366553103546,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24468085106382978,
						"acc_stderr,none": 0.025645553622266736,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2516297262059974,
						"acc_stderr,none": 0.011083276280441902,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.24632352941176472,
						"acc_stderr,none": 0.02617343857052,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.017630827375148383,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.04389311454644287,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2163265306122449,
						"acc_stderr,none": 0.026358916334904052,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24504387390315246,
						"acc_stderr,none": 0.034551866666141795,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.208955223880597,
						"acc_stderr,none": 0.028748298931728658,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26197272438947034,
						"acc_stderr,none": 0.04252822904603301,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653697,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.24096385542168675,
						"acc_stderr,none": 0.03329394119073529,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2573099415204678,
						"acc_stderr,none": 0.03352799844161865,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.37259296994396335,
						"acc_stderr,none": 0.004880552532182277,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37561025223759154,
						"acc_stderr,none": 0.004884248508854319,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.02304833666842021,
						"alias": "mrpc",
						"f1,none": 0.8094534711964549,
						"f1_stderr,none": 0.016446970592788025
					},
					"multimedqa": {
						"acc,none": 0.29041873669268986,
						"acc_norm,none": 0.2621056592314852,
						"acc_norm_stderr,none": 0.00011227080363858773,
						"acc_stderr,none": 0.08545187109541512,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5571369636963697,
						"acc_stderr,none": 0.007134757116013838,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6543453743778018,
						"mrr_stderr,none": 0.010285722448899795,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4525959367945824,
						"r@2_stderr,none": 0.016731608666774797
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6361926279407053,
						"mrr_stderr,none": 0.010452229377846638,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4729119638826185,
						"r@2_stderr,none": 0.016782632881639635
					},
					"openbookqa": {
						"acc,none": 0.22,
						"acc_norm,none": 0.356,
						"acc_norm_stderr,none": 0.021434712356072645,
						"acc_stderr,none": 0.01854421137582033,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.011082279027990138,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654283,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4385,
						"acc_stderr,none": 0.011098218786369077,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5165,
						"acc_stderr,none": 0.011177045144808306,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.559,
						"acc_stderr,none": 0.011105006104468736,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.011165587094621541,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.011158752568250668,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48114285714285715,
						"acc_stderr,none": 0.041467510659430655,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7317736670293797,
						"acc_norm,none": 0.7323177366702938,
						"acc_norm_stderr,none": 0.010330111189370434,
						"acc_stderr,none": 0.010336761992404483,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2509607173356106,
						"acc_norm,none": 0.27380444064901793,
						"acc_norm_stderr,none": 0.0032577682728456027,
						"acc_stderr,none": 0.0031675853233795056,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547198,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7055008765768597,
						"acc_norm,none": 0.47816743572035797,
						"acc_norm_stderr,none": 0.004496647221307374,
						"acc_stderr,none": 0.1509785421878474,
						"alias": "pythia",
						"bits_per_byte,none": 0.7202951006143281,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6475189970488797,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.91937435049955,
						"perplexity_stderr,none": 0.1766649109710932,
						"word_perplexity,none": 14.43743449971529,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.3599290780141844,
						"acc_norm_stderr,none": 0.042382067538495355,
						"acc_stderr,none": 0.035435409573252426,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.35,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.04372373160976027,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.30625,
						"acc_norm,none": 0.3625,
						"acc_norm_stderr,none": 0.038123743406448904,
						"acc_stderr,none": 0.036554511504337694,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.34507042253521125,
						"acc_norm,none": 0.3345070422535211,
						"acc_norm_stderr,none": 0.028046659818657005,
						"acc_stderr,none": 0.02825907565693515,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.48196961376533043,
						"acc_stderr,none": 0.006761010320862801,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5443729903536978,
						"acc_stderr,none": 0.002476888834990202,
						"alias": "qqp",
						"f1,none": 0.27439240556190175,
						"f1_stderr,none": 0.003687801885493474
					},
					"race": {
						"acc,none": 0.3626794258373206,
						"acc_stderr,none": 0.014879563111287502,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5234657039711191,
						"acc_stderr,none": 0.030063300411902652,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.889,
						"acc_norm,none": 0.819,
						"acc_norm_stderr,none": 0.012181436179177904,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5234657039711191,
						"acc_stderr,none": 0.030063300411902652,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7041284403669725,
						"acc_stderr,none": 0.01546566063319955,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5115965210436869,
						"acc_norm,none": 0.696740977706688,
						"acc_norm_stderr,none": 0.003249924442556649,
						"acc_stderr,none": 0.0035341411257345055,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.48673921000965026,
						"acc_stderr,none": 0.01603379529861715,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5005008012820513,
						"acc_stderr,none": 0.005004252916283737,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.44613357656835917,
						"acc_stderr,none": 0.005004542923278277,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5125490196078432,
						"acc_stderr,none": 0.004949420830381516,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2969838170591314,
						"acc_stderr,none": 0.0017656547847256618,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3292533659730722,
						"bleu_acc_stderr,none": 0.01645126444006823,
						"bleu_diff,none": -4.529088243896841,
						"bleu_diff_stderr,none": 0.7712398079275519,
						"bleu_max,none": 21.338292513074606,
						"bleu_max_stderr,none": 0.7277940463151109,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -7.482931401402743,
						"rouge1_diff_stderr,none": 0.9496821286941034,
						"rouge1_max,none": 43.84161496308325,
						"rouge1_max_stderr,none": 0.9158903747113558,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728348,
						"rouge2_diff,none": -8.44885859371253,
						"rouge2_diff_stderr,none": 1.0545426263051483,
						"rouge2_max,none": 27.212662956414054,
						"rouge2_max_stderr,none": 1.000997097911372,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.01572313952460876,
						"rougeL_diff,none": -7.719185136012378,
						"rougeL_diff_stderr,none": 0.9527244242978985,
						"rougeL_max,none": 41.177249485889,
						"rougeL_max_stderr,none": 0.9104080216212086
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3292533659730722,
						"bleu_acc_stderr,none": 0.01645126444006823,
						"bleu_diff,none": -4.529088243896841,
						"bleu_diff_stderr,none": 0.7712398079275519,
						"bleu_max,none": 21.338292513074606,
						"bleu_max_stderr,none": 0.7277940463151109,
						"rouge1_acc,none": 0.2913096695226438,
						"rouge1_acc_stderr,none": 0.015905987048184828,
						"rouge1_diff,none": -7.482931401402743,
						"rouge1_diff_stderr,none": 0.9496821286941034,
						"rouge1_max,none": 43.84161496308325,
						"rouge1_max_stderr,none": 0.9158903747113558,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728348,
						"rouge2_diff,none": -8.44885859371253,
						"rouge2_diff_stderr,none": 1.0545426263051483,
						"rouge2_max,none": 27.212662956414054,
						"rouge2_max_stderr,none": 1.000997097911372,
						"rougeL_acc,none": 0.2802937576499388,
						"rougeL_acc_stderr,none": 0.01572313952460876,
						"rougeL_diff,none": -7.719185136012378,
						"rougeL_diff_stderr,none": 0.9527244242978985,
						"rougeL_max,none": 41.177249485889,
						"rougeL_max_stderr,none": 0.9104080216212086
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2178702570379437,
						"acc_stderr,none": 0.014450846714123892,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3760973770803191,
						"acc_stderr,none": 0.013832337383250104,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.03740157480314961,
						"exact_match_stderr,none": 0.004210295288134857
					},
					"wic": {
						"acc,none": 0.5015673981191222,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7202951006143281,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6475189970488797,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.43743449971529,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5911602209944752,
						"acc_stderr,none": 0.013816954295135696,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5915492957746479,
						"acc_stderr,none": 0.05875113694257525,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6153846153846154,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.717948717948718,
						"acc_stderr,none": 0.02728514708163732,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5292727272727272,
						"acc_stderr,none": 0.02931613977250575,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.022175109265613165,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269945,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3789558232931727,
						"acc_stderr,none": 0.04963845077146467,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293527,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3530120481927711,
						"acc_stderr,none": 0.009579225840709714,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.43253012048192774,
						"acc_stderr,none": 0.009930409027139455,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757715,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5393574297188755,
						"acc_stderr,none": 0.009990976095711876,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.39116465863453814,
						"acc_stderr,none": 0.009781766322010001,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4534136546184739,
						"acc_stderr,none": 0.009978476483838962,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778588,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42891566265060244,
						"acc_stderr,none": 0.00992027312104558,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3542168674698795,
						"acc_stderr,none": 0.009586620142951845,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981019,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.36987951807228914,
						"acc_stderr,none": 0.009676749339285938,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3473895582329317,
						"acc_stderr,none": 0.0095438354093349,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5314361350099271,
						"acc_stderr,none": 0.052252798585811375,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4798146922567836,
						"acc_stderr,none": 0.01285663570649829,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.011884972073313802,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352966,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163343,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.49702183984116477,
						"acc_stderr,none": 0.012866897066011242,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.514890800794176,
						"acc_stderr,none": 0.012861417842074004,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48510919920582396,
						"acc_stderr,none": 0.012861417842074006,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5354070152217075,
						"acc_stderr,none": 0.012834822852860037,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5043017868960953,
						"acc_stderr,none": 0.012866649085718848,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5248180013236268,
						"acc_stderr,none": 0.012851264962354848,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5459960291197882,
						"acc_stderr,none": 0.012812565368728929,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7084738143403012,
						"acc_stderr,none": 0.05612959809115702,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8090322580645162,
						"acc_stderr,none": 0.008153514797981513,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6024096385542169,
						"acc_stderr,none": 0.05404517824786812,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5516162669447341,
						"acc_stderr,none": 0.016067958526765066,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6197718631178707,
						"acc_stderr,none": 0.029990755624373516,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5841269841269842,
						"acc_stderr,none": 0.027814367051292147,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6845238095238095,
						"acc_stderr,none": 0.02072018271174244,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"
	},
	"aisingapore/sealion3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5171927846674182,
						"acc_norm,none": 0.49859075535512964,
						"acc_norm_stderr,none": 0.08757349093521882,
						"acc_stderr,none": 0.10939390456049498,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.014450347970381248,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.049400000000000006,
						"acc_stderr,none": 0.05461665373579676,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.7809253731343283,
						"acc_stderr,none": 0.1691012745939898,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.27488855869242196,
						"acc_norm,none": 0.27488855869242196,
						"acc_norm_stderr,none": 0.12108681009400432,
						"acc_stderr,none": 0.12108681009400432,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25030219305819373,
						"acc_norm,none": 0.25030219305819373,
						"acc_norm_stderr,none": 0.03870120740309609,
						"acc_stderr,none": 0.03870120740309609,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.9091103905784137,
						"likelihood_diff_stderr,none": 0.505440347318558,
						"pct_stereotype,none": 0.5098389982110912,
						"pct_stereotype_stderr,none": 0.10170380186087559
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0718503937007874,
						"exact_match_stderr,none": 0.005730184515948701
					},
					"glue": {
						"acc,none": 0.4483980466888995,
						"acc_stderr,none": 0.0028334743424008758,
						"alias": "glue",
						"f1,none": 0.339683100899478,
						"f1_stderr,none": 0.001775161056713038,
						"mcc,none": -0.03248331055621942,
						"mcc_stderr,none": 0.031446874204362055
					},
					"kmmlu": {
						"acc,none": 0.289922032919434,
						"acc_norm,none": 0.289922032919434,
						"acc_norm_stderr,none": 0.028181923658183262,
						"acc_stderr,none": 0.028181923658183262,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48015786011839506,
						"acc_norm,none": 0.464,
						"acc_norm_stderr,none": 0.0004984048096192349,
						"acc_stderr,none": 0.03488779926294961,
						"alias": "kobest",
						"f1,none": 0.3733990103247268,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.45002910925674366,
						"acc_stderr,none": 0.011607974802110973,
						"alias": "lambada",
						"perplexity,none": 8.120358044886693,
						"perplexity_stderr,none": 0.7128211791664583
					},
					"lambada_cloze": {
						"acc,none": 0.010867455850960606,
						"acc_stderr,none": 0.0015236322731814486,
						"alias": "lambada_cloze",
						"perplexity,none": 1180.990170895229,
						"perplexity_stderr,none": 239.06392859488466
					},
					"lambada_multilingual": {
						"acc,none": 0.2650494857364642,
						"acc_stderr,none": 0.07832042421824936,
						"alias": "lambada_multilingual",
						"perplexity,none": 301.56153618436906,
						"perplexity_stderr,none": 122.25041790416196
					},
					"mmlu": {
						"acc,none": 0.24305654465175902,
						"acc_stderr,none": 0.036930868857829716,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24973432518597238,
						"acc_stderr,none": 0.03290048214291232,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2581268104280656,
						"acc_stderr,none": 0.03745195966789567,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2349691257718557,
						"acc_stderr,none": 0.036464942583659364,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2261338407865525,
						"acc_stderr,none": 0.03792556864697487,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2841731724627395,
						"acc_norm,none": 0.25666432823218555,
						"acc_norm_stderr,none": 8.551637421340022e-05,
						"acc_stderr,none": 0.09107735690942864,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5131428571428572,
						"acc_stderr,none": 0.020606885464132158,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.6680831320111656,
						"acc_norm,none": 0.5058123217883,
						"acc_norm_stderr,none": 0.010179813857728832,
						"acc_stderr,none": 0.16447181578298306,
						"alias": "pythia",
						"bits_per_byte,none": 0.7558678955269331,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6886471419915159,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.75511211102174,
						"perplexity_stderr,none": 0.18227271984034885,
						"word_perplexity,none": 16.47225610931037,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.37943262411347517,
						"acc_norm,none": 0.4237588652482269,
						"acc_norm_stderr,none": 0.058460943966813936,
						"acc_stderr,none": 0.03871557967307651,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5868357126218762,
						"acc_stderr,none": 0.0504320154297118,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.29115457709716386,
						"acc_stderr,none": 0.001635881417641523,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.38310893512851896,
						"bleu_acc_stderr,none": 0.017018461679389855,
						"bleu_diff,none": 1.3638964471210027,
						"bleu_diff_stderr,none": 0.8632433983524191,
						"bleu_max,none": 23.17592314469333,
						"bleu_max_stderr,none": 0.7705199444430939,
						"rouge1_acc,none": 0.379436964504284,
						"rouge1_acc_stderr,none": 0.016987039266142985,
						"rouge1_diff,none": 1.5514230336845742,
						"rouge1_diff_stderr,none": 1.1718627062272666,
						"rouge1_max,none": 47.240232935223176,
						"rouge1_max_stderr,none": 0.9284398908010626,
						"rouge2_acc,none": 0.3011015911872705,
						"rouge2_acc_stderr,none": 0.016058999026100612,
						"rouge2_diff,none": 1.5445848533033304,
						"rouge2_diff_stderr,none": 1.2442679354094344,
						"rouge2_max,none": 32.053173111877065,
						"rouge2_max_stderr,none": 1.0561632909441534,
						"rougeL_acc,none": 0.379436964504284,
						"rougeL_acc_stderr,none": 0.01698703926614298,
						"rougeL_diff,none": 1.6657629586959137,
						"rougeL_diff_stderr,none": 1.1786314310856658,
						"rougeL_max,none": 44.731411115557066,
						"rougeL_max_stderr,none": 0.9472465952140484
					},
					"xcopa": {
						"acc,none": 0.5592727272727273,
						"acc_stderr,none": 0.05449048831875534,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.37269076305220883,
						"acc_stderr,none": 0.03472708129682387,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5490042717044703,
						"acc_stderr,none": 0.058819532506748554,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6781299168352439,
						"acc_stderr,none": 0.050686065221542786,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5171927846674182,
						"acc_norm,none": 0.49859075535512964,
						"acc_norm_stderr,none": 0.08757349093521882,
						"acc_stderr,none": 0.10939390456049498,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.014450347970381248,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.01493311749093257,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.014922019523732961,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.33416666666666667,
						"acc_stderr,none": 0.01362243481313678,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2858361774744027,
						"acc_norm,none": 0.31399317406143346,
						"acc_norm_stderr,none": 0.01356269122472628,
						"acc_stderr,none": 0.01320319608853737,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6313131313131313,
						"acc_norm,none": 0.5896464646464646,
						"acc_norm_stderr,none": 0.010093531255765467,
						"acc_stderr,none": 0.009899640855681045,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.049400000000000006,
						"acc_stderr,none": 0.05461665373579676,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0525,
						"acc_stderr,none": 0.004988418302285769,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.038,
						"acc_stderr,none": 0.004276346989170344,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.09,
						"acc_stderr,none": 0.006400819107162981,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.2205,
						"acc_stderr,none": 0.009272694669469965,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0145,
						"acc_stderr,none": 0.0026736583971427533,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0665,
						"acc_stderr,none": 0.005572647683202397,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.0012232122154646914,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.0019924821184884632,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339433,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000099,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005639913232104121,
						"acc_stderr,none": 0.0015601516534579519,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.7809253731343283,
						"acc_stderr,none": 0.1691012745939898,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037191,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.0039698563903194225,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256572,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042094,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787726,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031307,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.015605111967541946,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774164,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230192,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036333,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.0066959566781630425,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400243,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491134,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248104,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847169,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936717,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737242,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307992,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.684,
						"acc_stderr,none": 0.014709193056057127,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.013860415257527911,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904614,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597491,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565634,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.206,
						"acc_stderr,none": 0.012795613612786529,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.013663187134877642,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257942,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575018,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897908,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.01210216767618358,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.01484221315341124,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122358,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.0146326386586329,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.01575221038877184,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.01576859691439438,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113123,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262416,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369683,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785136,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928355,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000076,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973433,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717581,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.485,
						"acc_stderr,none": 0.015812179641814906,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904616,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.015070604603768412,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.42,
						"acc_stderr,none": 0.015615500115072956,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992436,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756986,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.01151014697923021,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369674,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008215,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074112,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103298,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378216,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817137,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942338,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.388,
						"acc_stderr,none": 0.015417317979911072,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407557,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6278287461773701,
						"acc_stderr,none": 0.0084544342473739,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4107142857142857,
						"acc_stderr,none": 0.0663363415035954,
						"alias": "cb",
						"f1,none": 0.1940928270042194,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.27488855869242196,
						"acc_norm,none": 0.27488855869242196,
						"acc_norm_stderr,none": 0.12108681009400432,
						"acc_stderr,none": 0.12108681009400432,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.15151515151515152,
						"acc_norm,none": 0.15151515151515152,
						"acc_norm_stderr,none": 0.06338333534349055,
						"acc_stderr,none": 0.06338333534349055,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2978723404255319,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.06742861107915606,
						"acc_stderr,none": 0.06742861107915606,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518172,
						"acc_stderr,none": 0.05443310539518172,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755135,
						"acc_stderr,none": 0.08780518530755135,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.40540540540540543,
						"acc_norm,none": 0.40540540540540543,
						"acc_norm_stderr,none": 0.08182838794858086,
						"acc_stderr,none": 0.08182838794858086,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031766,
						"acc_stderr,none": 0.07633651333031766,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.05,
						"acc_norm,none": 0.05,
						"acc_norm_stderr,none": 0.04999999999999999,
						"acc_stderr,none": 0.04999999999999999,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.052631578947368404,
						"acc_stderr,none": 0.052631578947368404,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.1010152544552211,
						"acc_stderr,none": 0.1010152544552211,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.1008316903303367,
						"acc_stderr,none": 0.1008316903303367,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.0713860923457608,
						"acc_stderr,none": 0.0713860923457608,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3469387755102041,
						"acc_norm,none": 0.3469387755102041,
						"acc_norm_stderr,none": 0.06870411522695292,
						"acc_stderr,none": 0.06870411522695292,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.06791703342160259,
						"acc_stderr,none": 0.06791703342160259,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.06545849153992007,
						"acc_stderr,none": 0.06545849153992007,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25030219305819373,
						"acc_norm,none": 0.25030219305819373,
						"acc_norm_stderr,none": 0.03870120740309609,
						"acc_stderr,none": 0.03870120740309609,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.03633500043381988,
						"acc_stderr,none": 0.03633500043381988,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810785,
						"acc_stderr,none": 0.03304756158810785,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2679425837320574,
						"acc_norm,none": 0.2679425837320574,
						"acc_norm_stderr,none": 0.030708724295561367,
						"acc_stderr,none": 0.030708724295561367,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2125,
						"acc_norm,none": 0.2125,
						"acc_norm_stderr,none": 0.03244189290245473,
						"acc_stderr,none": 0.03244189290245473,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.23839009287925697,
						"acc_norm,none": 0.23839009287925697,
						"acc_norm_stderr,none": 0.023745562285389366,
						"acc_stderr,none": 0.023745562285389366,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.029771775228145628,
						"acc_stderr,none": 0.029771775228145628,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24581005586592178,
						"acc_norm,none": 0.24581005586592178,
						"acc_norm_stderr,none": 0.032272320235413,
						"acc_stderr,none": 0.032272320235413,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.27848101265822783,
						"acc_norm,none": 0.27848101265822783,
						"acc_norm_stderr,none": 0.029178682304842555,
						"acc_stderr,none": 0.029178682304842555,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936713,
						"acc_stderr,none": 0.04022559246936713,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.041099848424639984,
						"acc_stderr,none": 0.041099848424639984,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04557239513497752,
						"acc_stderr,none": 0.04557239513497752,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.026223115500506114,
						"acc_stderr,none": 0.026223115500506114,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.02977177522814565,
						"acc_stderr,none": 0.02977177522814565,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2302158273381295,
						"acc_norm,none": 0.2302158273381295,
						"acc_norm_stderr,none": 0.03583542294357026,
						"acc_stderr,none": 0.03583542294357026,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2578616352201258,
						"acc_norm,none": 0.2578616352201258,
						"acc_norm_stderr,none": 0.03480224533547635,
						"acc_stderr,none": 0.03480224533547635,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.27607361963190186,
						"acc_norm,none": 0.27607361963190186,
						"acc_norm_stderr,none": 0.03512385283705051,
						"acc_stderr,none": 0.03512385283705051,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.22093023255813954,
						"acc_norm,none": 0.22093023255813954,
						"acc_norm_stderr,none": 0.03172617353438932,
						"acc_stderr,none": 0.03172617353438932,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.20634920634920634,
						"acc_norm,none": 0.20634920634920634,
						"acc_norm_stderr,none": 0.02554343316084326,
						"acc_stderr,none": 0.02554343316084326,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.03031371053819888,
						"acc_stderr,none": 0.03031371053819888,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.25630252100840334,
						"acc_norm,none": 0.25630252100840334,
						"acc_norm_stderr,none": 0.02835962087053395,
						"acc_stderr,none": 0.02835962087053395,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.03633384414073465,
						"acc_stderr,none": 0.03633384414073465,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081376,
						"acc_stderr,none": 0.03607993033081376,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23863636363636365,
						"acc_norm,none": 0.23863636363636365,
						"acc_norm_stderr,none": 0.03222147017899509,
						"acc_stderr,none": 0.03222147017899509,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.28187919463087246,
						"acc_norm,none": 0.28187919463087246,
						"acc_norm_stderr,none": 0.036982767559851,
						"acc_stderr,none": 0.036982767559851,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.039803298549204315,
						"acc_stderr,none": 0.039803298549204315,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.20121951219512196,
						"acc_norm,none": 0.20121951219512196,
						"acc_norm_stderr,none": 0.03140183067883277,
						"acc_stderr,none": 0.03140183067883277,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.03766763889539852,
						"acc_stderr,none": 0.03766763889539852,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.038932596106046734,
						"acc_stderr,none": 0.038932596106046734,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697948,
						"acc_stderr,none": 0.03162930395697948,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337436,
						"acc_stderr,none": 0.03407826167337436,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25547445255474455,
						"acc_norm,none": 0.25547445255474455,
						"acc_norm_stderr,none": 0.02153880540239956,
						"acc_stderr,none": 0.02153880540239956,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.029576535293164483,
						"acc_stderr,none": 0.029576535293164483,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.03930879526823993,
						"acc_stderr,none": 0.03930879526823993,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.03914731903595734,
						"acc_stderr,none": 0.03914731903595734,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.029461344042368907,
						"acc_stderr,none": 0.029461344042368907,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.22777777777777777,
						"acc_norm,none": 0.22777777777777777,
						"acc_norm_stderr,none": 0.03134731210385171,
						"acc_stderr,none": 0.03134731210385171,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.031961071380099675,
						"acc_stderr,none": 0.031961071380099675,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.0431409132531879,
						"acc_stderr,none": 0.0431409132531879,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.03695183311650232,
						"acc_stderr,none": 0.03695183311650232,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.029737751726596835,
						"acc_stderr,none": 0.029737751726596835,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281318,
						"acc_stderr,none": 0.022199827758281318,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.033291151121447815,
						"acc_stderr,none": 0.033291151121447815,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26991150442477874,
						"acc_norm,none": 0.26991150442477874,
						"acc_norm_stderr,none": 0.02959423999541737,
						"acc_stderr,none": 0.02959423999541737,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.22424242424242424,
						"acc_norm,none": 0.22424242424242424,
						"acc_norm_stderr,none": 0.03256866661681102,
						"acc_stderr,none": 0.03256866661681102,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.03471460744058984,
						"acc_stderr,none": 0.03471460744058984,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.03248331055621942,
						"mcc_stderr,none": 0.031446874204362055
					},
					"copa": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.040201512610368445,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.9091103905784137,
						"likelihood_diff_stderr,none": 0.505440347318558,
						"pct_stereotype,none": 0.5098389982110912,
						"pct_stereotype_stderr,none": 0.10170380186087559
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.804263565891473,
						"likelihood_diff_stderr,none": 0.08766733065803171,
						"pct_stereotype,none": 0.5951103160405486,
						"pct_stereotype_stderr,none": 0.011990301067087145
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.028846153846154,
						"likelihood_diff_stderr,none": 0.36769999315053076,
						"pct_stereotype,none": 0.6483516483516484,
						"pct_stereotype_stderr,none": 0.050331323186278906
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.113636363636363,
						"likelihood_diff_stderr,none": 2.0597229613921266,
						"pct_stereotype,none": 0.5454545454545454,
						"pct_stereotype_stderr,none": 0.1574591643244434
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.75,
						"likelihood_diff_stderr,none": 0.6629352706538725,
						"pct_stereotype,none": 0.676923076923077,
						"pct_stereotype_stderr,none": 0.05845647751373333
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.1421875,
						"likelihood_diff_stderr,none": 0.19044752115624203,
						"pct_stereotype,none": 0.6125,
						"pct_stereotype_stderr,none": 0.027276808733259977
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.6475694444444446,
						"likelihood_diff_stderr,none": 0.22969310447390054,
						"pct_stereotype,none": 0.5787037037037037,
						"pct_stereotype_stderr,none": 0.03367462138896078
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.888888888888889,
						"likelihood_diff_stderr,none": 0.3406578232842618,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.05650114676852965
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5236220472440944,
						"likelihood_diff_stderr,none": 0.14237011055250834,
						"pct_stereotype,none": 0.46653543307086615,
						"pct_stereotype_stderr,none": 0.022155988267174086
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7454954954954953,
						"likelihood_diff_stderr,none": 0.35769134702092015,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.04494665749754944
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.365591397849462,
						"likelihood_diff_stderr,none": 0.3940549753155748,
						"pct_stereotype,none": 0.8387096774193549,
						"pct_stereotype_stderr,none": 0.03834564688497146
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.425,
						"likelihood_diff_stderr,none": 0.2488989957966983,
						"pct_stereotype,none": 0.6526315789473685,
						"pct_stereotype_stderr,none": 0.03463365347393427
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.018671735241503,
						"likelihood_diff_stderr,none": 0.09593920083952512,
						"pct_stereotype,none": 0.42993440667859273,
						"pct_stereotype_stderr,none": 0.012092789934357112
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.911111111111111,
						"likelihood_diff_stderr,none": 0.3994243499674594,
						"pct_stereotype,none": 0.37777777777777777,
						"pct_stereotype_stderr,none": 0.051392052067171366
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.4615384615384617,
						"likelihood_diff_stderr,none": 0.8183816473443544,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.234848484848484,
						"likelihood_diff_stderr,none": 0.47577928921691215,
						"pct_stereotype,none": 0.5151515151515151,
						"pct_stereotype_stderr,none": 0.06198888629778894
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.2281931464174454,
						"likelihood_diff_stderr,none": 0.17813713892683955,
						"pct_stereotype,none": 0.43302180685358255,
						"pct_stereotype_stderr,none": 0.02769893509024992
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.913043478260869,
						"likelihood_diff_stderr,none": 0.25367343080099936,
						"pct_stereotype,none": 0.25691699604743085,
						"pct_stereotype_stderr,none": 0.027524206747893333
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.239583333333333,
						"likelihood_diff_stderr,none": 0.5365740536796012,
						"pct_stereotype,none": 0.4305555555555556,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.251086956521739,
						"likelihood_diff_stderr,none": 0.1754640318904927,
						"pct_stereotype,none": 0.31521739130434784,
						"pct_stereotype_stderr,none": 0.021685782795018992
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.269565217391304,
						"likelihood_diff_stderr,none": 0.4349257964066157,
						"pct_stereotype,none": 0.5826086956521739,
						"pct_stereotype_stderr,none": 0.04618572379512263
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.78021978021978,
						"likelihood_diff_stderr,none": 0.4003172422681681,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497396
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.934948979591836,
						"likelihood_diff_stderr,none": 0.28791997057886043,
						"pct_stereotype,none": 0.6275510204081632,
						"pct_stereotype_stderr,none": 0.03462107977939841
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0718503937007874,
						"exact_match_stderr,none": 0.005730184515948701
					},
					"glue": {
						"acc,none": 0.4483980466888995,
						"acc_stderr,none": 0.0028334743424008758,
						"alias": "glue",
						"f1,none": 0.339683100899478,
						"f1_stderr,none": 0.001775161056713038,
						"mcc,none": -0.03248331055621942,
						"mcc_stderr,none": 0.031446874204362055
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.019711902956785442,
						"exact_match_stderr,get-answer": 0.003828982978735702
					},
					"hellaswag": {
						"acc,none": 0.476000796654053,
						"acc_norm,none": 0.6325433180641307,
						"acc_norm_stderr,none": 0.004811269975450583,
						"acc_stderr,none": 0.00498403025050728,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.289922032919434,
						"acc_norm,none": 0.289922032919434,
						"acc_norm_stderr,none": 0.028181923658183262,
						"acc_stderr,none": 0.028181923658183262,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.04512608598542127,
						"acc_stderr,none": 0.04512608598542127,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.014127086556490526,
						"acc_stderr,none": 0.014127086556490526,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575022,
						"acc_stderr,none": 0.014442734941575022,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612032,
						"acc_stderr,none": 0.014190150117612032,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25333333333333335,
						"acc_norm,none": 0.25333333333333335,
						"acc_norm_stderr,none": 0.01777035645506745,
						"acc_stderr,none": 0.01777035645506745,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.318,
						"acc_norm,none": 0.318,
						"acc_norm_stderr,none": 0.014734079309311901,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.031828687164775826,
						"acc_stderr,none": 0.031828687164775826,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509003,
						"acc_stderr,none": 0.014658474370509003,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23846153846153847,
						"acc_norm,none": 0.23846153846153847,
						"acc_norm_stderr,none": 0.03751977598816765,
						"acc_stderr,none": 0.03751977598816765,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932268,
						"acc_stderr,none": 0.04163331998932268,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.327,
						"acc_norm,none": 0.327,
						"acc_norm_stderr,none": 0.014842213153411252,
						"acc_stderr,none": 0.014842213153411252,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.342,
						"acc_norm,none": 0.342,
						"acc_norm_stderr,none": 0.015008706182121726,
						"acc_stderr,none": 0.015008706182121726,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881424,
						"acc_stderr,none": 0.013588548437881424,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445507,
						"acc_stderr,none": 0.014428554438445507,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091512,
						"acc_stderr,none": 0.014205696104091512,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473486,
						"acc_stderr,none": 0.014746404865473486,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445502,
						"acc_stderr,none": 0.014428554438445502,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.307,
						"acc_norm,none": 0.307,
						"acc_norm_stderr,none": 0.014593284892852628,
						"acc_stderr,none": 0.014593284892852628,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543147,
						"acc_stderr,none": 0.014512395033543147,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509003,
						"acc_stderr,none": 0.014658474370509003,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.01422115470843493,
						"acc_stderr,none": 0.01422115470843493,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247104,
						"acc_stderr,none": 0.013414729030247104,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441268,
						"acc_stderr,none": 0.014399942998441268,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440469,
						"acc_stderr,none": 0.013946271849440469,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.018278234601220907,
						"acc_stderr,none": 0.018278234601220907,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.014853842487270334,
						"acc_stderr,none": 0.014853842487270334,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.01386041525752791,
						"acc_stderr,none": 0.01386041525752791,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445507,
						"acc_stderr,none": 0.014428554438445507,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632905,
						"acc_stderr,none": 0.014632638658632905,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.040201512610368445,
						"acc_stderr,none": 0.040201512610368445,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.025674838352260615,
						"acc_stderr,none": 0.025674838352260615,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462618,
						"acc_stderr,none": 0.014078856992462618,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441273,
						"acc_stderr,none": 0.014399942998441273,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809965,
						"acc_stderr,none": 0.014312087053809965,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.03263741725420569,
						"acc_stderr,none": 0.03263741725420569,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.0144568322948011,
						"acc_stderr,none": 0.0144568322948011,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612035,
						"acc_stderr,none": 0.014190150117612035,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03165255790786194,
						"acc_stderr,none": 0.03165255790786194,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722694,
						"acc_stderr,none": 0.014645596385722694,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48015786011839506,
						"acc_norm,none": 0.464,
						"acc_norm_stderr,none": 0.0004984048096192349,
						"acc_stderr,none": 0.03488779926294961,
						"alias": "kobest",
						"f1,none": 0.3733990103247268,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701182,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.015813097547730984,
						"alias": " - kobest_copa",
						"f1,none": 0.485473124479467,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.366,
						"acc_norm,none": 0.464,
						"acc_norm_stderr,none": 0.02232498173838525,
						"acc_stderr,none": 0.021564276850201618,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3625161193629773,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5062972292191436,
						"acc_stderr,none": 0.02512395255890725,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3872298872298872,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.45002910925674366,
						"acc_stderr,none": 0.011607974802110973,
						"alias": "lambada",
						"perplexity,none": 8.120358044886693,
						"perplexity_stderr,none": 0.7128211791664583
					},
					"lambada_cloze": {
						"acc,none": 0.010867455850960606,
						"acc_stderr,none": 0.0015236322731814486,
						"alias": "lambada_cloze",
						"perplexity,none": 1180.990170895229,
						"perplexity_stderr,none": 239.06392859488466
					},
					"lambada_multilingual": {
						"acc,none": 0.2650494857364642,
						"acc_stderr,none": 0.07832042421824936,
						"alias": "lambada_multilingual",
						"perplexity,none": 301.56153618436906,
						"perplexity_stderr,none": 122.25041790416196
					},
					"lambada_openai": {
						"acc,none": 0.4758393169027751,
						"acc_stderr,none": 0.006957840284118757,
						"alias": " - lambada_openai",
						"perplexity,none": 6.75511211102174,
						"perplexity_stderr,none": 0.18227271984034885
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.011837764409082088,
						"acc_stderr,none": 0.0015068188529755334,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 711.5142958629096,
						"perplexity_stderr,none": 27.026966666472862
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.19755482243353387,
						"acc_stderr,none": 0.00554706910480584,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 421.88190652980865,
						"perplexity_stderr,none": 28.784922318661916
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.46788278672617895,
						"acc_stderr,none": 0.0069515917762973094,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.8442916630935855,
						"perplexity_stderr,none": 0.18615473965561613
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.18746361342907045,
						"acc_stderr,none": 0.00543740865328392,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 390.55806473654144,
						"perplexity_stderr,none": 24.23109602285033
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.25654958276732,
						"acc_stderr,none": 0.006084483727167683,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 222.7621421599715,
						"perplexity_stderr,none": 14.12793464353467
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.21579662332621774,
						"acc_stderr,none": 0.005731242031755276,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 465.7612758324303,
						"perplexity_stderr,none": 31.396293847479033
					},
					"lambada_standard": {
						"acc,none": 0.4313991849408112,
						"acc_stderr,none": 0.006900101383454351,
						"alias": " - lambada_standard",
						"perplexity,none": 9.470125016584696,
						"perplexity_stderr,none": 0.26885341818682185
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.009897147292839123,
						"acc_stderr,none": 0.00137913647764537,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 1650.4660459275487,
						"perplexity_stderr,none": 57.95317901878976
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.255089058524173,
						"exact_match_stderr,get-answer": 0.010997905904062626
					},
					"logiqa": {
						"acc,none": 0.21505376344086022,
						"acc_norm,none": 0.26881720430107525,
						"acc_norm_stderr,none": 0.017389409463712625,
						"acc_stderr,none": 0.016115240864129177,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23791348600508905,
						"acc_norm,none": 0.27989821882951654,
						"acc_norm_stderr,none": 0.011326843954481582,
						"acc_stderr,none": 0.010742950531023884,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23651591289782245,
						"acc_norm,none": 0.23651591289782245,
						"acc_norm_stderr,none": 0.007779125325665779,
						"acc_stderr,none": 0.007779125325665786,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4748993857233637,
						"acc_stderr,none": 0.005139407669364335,
						"alias": "mc_taco",
						"f1,none": 0.48759818106655645,
						"f1_stderr,none": 0.0062634603551213145
					},
					"medmcqa": {
						"acc,none": 0.24958163997131247,
						"acc_norm,none": 0.24958163997131247,
						"acc_norm_stderr,none": 0.006692155571156369,
						"acc_stderr,none": 0.006692155571156369,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2537313432835821,
						"acc_norm,none": 0.2537313432835821,
						"acc_norm_stderr,none": 0.012200878561490575,
						"acc_stderr,none": 0.012200878561490575,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24305654465175902,
						"acc_stderr,none": 0.036930868857829716,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.21481481481481482,
						"acc_stderr,none": 0.03547854198560824,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.23026315789473684,
						"acc_stderr,none": 0.034260594244031654,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847415,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22264150943396227,
						"acc_stderr,none": 0.0256042334708991,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03476590104304134,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816507,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2254335260115607,
						"acc_stderr,none": 0.03186209851641144,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.04336432707993176,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.25957446808510637,
						"acc_stderr,none": 0.028659179374292323,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.04142439719489362,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2482758620689655,
						"acc_stderr,none": 0.03600105692727771,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.20634920634920634,
						"acc_stderr,none": 0.02084229093011467,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.040735243221471255,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036846,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.22580645161290322,
						"acc_stderr,none": 0.023785577884181012,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.1921182266009852,
						"acc_stderr,none": 0.02771931570961477,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.24242424242424243,
						"acc_stderr,none": 0.03346409881055953,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23737373737373738,
						"acc_stderr,none": 0.0303137105381989,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.02977866303775296,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2153846153846154,
						"acc_stderr,none": 0.020843034557462874,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.025644108639267603,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2184873949579832,
						"acc_stderr,none": 0.02684151432295894,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2251655629139073,
						"acc_stderr,none": 0.03410435282008936,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.1871559633027523,
						"acc_stderr,none": 0.01672268452620014,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.1574074074074074,
						"acc_stderr,none": 0.02483717351824239,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.030190282453501936,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.31645569620253167,
						"acc_stderr,none": 0.03027497488021897,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.30493273542600896,
						"acc_stderr,none": 0.03089861088247751,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.29770992366412213,
						"acc_stderr,none": 0.04010358942462203,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24973432518597238,
						"acc_stderr,none": 0.03290048214291232,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2066115702479339,
						"acc_stderr,none": 0.03695980128098826,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.04133119440243839,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340456,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.21359223300970873,
						"acc_stderr,none": 0.040580420156460364,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.02934311479809446,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.26053639846743293,
						"acc_stderr,none": 0.015696008563807096,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.022698657167855713,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23687150837988827,
						"acc_stderr,none": 0.014219570788103984,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2973856209150327,
						"acc_stderr,none": 0.026173908506718576,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2581268104280656,
						"acc_stderr,none": 0.03745195966789567,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.18971061093247588,
						"acc_stderr,none": 0.022268196258783214,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.02409347123262133,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2801418439716312,
						"acc_stderr,none": 0.02678917235114024,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2588005215123859,
						"acc_stderr,none": 0.011186109046564611,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.21323529411764705,
						"acc_stderr,none": 0.024880971512294247,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24673202614379086,
						"acc_stderr,none": 0.017440820367402507,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.22727272727272727,
						"acc_stderr,none": 0.04013964554072774,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2571428571428571,
						"acc_stderr,none": 0.02797982353874455,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2349691257718557,
						"acc_stderr,none": 0.036464942583659364,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2835820895522388,
						"acc_stderr,none": 0.03187187537919797,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2261338407865525,
						"acc_stderr,none": 0.03792556864697487,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.25903614457831325,
						"acc_stderr,none": 0.03410646614071856,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.30409356725146197,
						"acc_stderr,none": 0.03528211258245231,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.35476311767702495,
						"acc_stderr,none": 0.004829540325197781,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35465825874694873,
						"acc_stderr,none": 0.0048250416769568625,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6544117647058824,
						"acc_stderr,none": 0.02357261804873991,
						"alias": "mrpc",
						"f1,none": 0.7847328244274809,
						"f1_stderr,none": 0.017761593624350907
					},
					"multimedqa": {
						"acc,none": 0.2841731724627395,
						"acc_norm,none": 0.25666432823218555,
						"acc_norm_stderr,none": 8.551637421340022e-05,
						"acc_stderr,none": 0.09107735690942864,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.00710697625275154,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.670428893905192,
						"mrr_stderr,none": 0.010392183579316521,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.43340857787810383,
						"r@2_stderr,none": 0.01665758789450121
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6388261869179745,
						"mrr_stderr,none": 0.01047575594468882,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750964,
						"r@2,none": 0.4672686230248307,
						"r@2_stderr,none": 0.016771264669080584
					},
					"openbookqa": {
						"acc,none": 0.262,
						"acc_norm,none": 0.376,
						"acc_norm_stderr,none": 0.021683827539286122,
						"acc_stderr,none": 0.019684688820194713,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.5345,
						"acc_stderr,none": 0.011156482803925174,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.011164310140373722,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4985,
						"acc_stderr,none": 0.011183085696839198,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5215,
						"acc_stderr,none": 0.011172792428275121,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5095,
						"acc_stderr,none": 0.011181117282805216,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.011145474902641256,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.011177408788874894,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5131428571428572,
						"acc_stderr,none": 0.020606885464132158,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7399347116430903,
						"acc_norm,none": 0.7464635473340587,
						"acc_norm_stderr,none": 0.01015009083455178,
						"acc_stderr,none": 0.0102348932490613,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.22640905209222886,
						"acc_norm,none": 0.26008753202391116,
						"acc_norm_stderr,none": 0.0032049635220161524,
						"acc_stderr,none": 0.003057565251869512,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.02163719798572238,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.6680831320111656,
						"acc_norm,none": 0.5058123217883,
						"acc_norm_stderr,none": 0.010179813857728832,
						"acc_stderr,none": 0.16447181578298306,
						"alias": "pythia",
						"bits_per_byte,none": 0.7558678955269331,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6886471419915159,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.75511211102174,
						"perplexity_stderr,none": 0.18227271984034885,
						"word_perplexity,none": 16.47225610931037,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.37943262411347517,
						"acc_norm,none": 0.4237588652482269,
						"acc_norm_stderr,none": 0.058460943966813936,
						"acc_stderr,none": 0.03871557967307651,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.5333333333333333,
						"acc_norm_stderr,none": 0.0457329560380023,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.039125538756915115,
						"acc_stderr,none": 0.03812374340644892,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3732394366197183,
						"acc_norm,none": 0.38028169014084506,
						"acc_norm_stderr,none": 0.0288573637517583,
						"acc_stderr,none": 0.02875089548898921,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5070474098480688,
						"acc_stderr,none": 0.006764738496830989,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.48164729161513725,
						"acc_stderr,none": 0.002485024916596532,
						"alias": "qqp",
						"f1,none": 0.33543681623592836,
						"f1_stderr,none": 0.003435777824920645
					},
					"race": {
						"acc,none": 0.37607655502392345,
						"acc_stderr,none": 0.014991791489173295,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5667870036101083,
						"acc_stderr,none": 0.029826764082138277,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.894,
						"acc_norm,none": 0.839,
						"acc_norm_stderr,none": 0.011628164696727184,
						"acc_stderr,none": 0.009739551265785143,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5631768953068592,
						"acc_stderr,none": 0.029855247390314952,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01694185368929243,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5088973308007597,
						"acc_norm,none": 0.6931920423872838,
						"acc_norm_stderr,none": 0.0032605496380094883,
						"acc_stderr,none": 0.003534532309312502,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5868357126218762,
						"acc_stderr,none": 0.0504320154297118,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6829927884615384,
						"acc_stderr,none": 0.004657062801250884,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5919732441471572,
						"acc_stderr,none": 0.004947944329464063,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.4877450980392157,
						"acc_stderr,none": 0.004949493090105161,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.29115457709716386,
						"acc_stderr,none": 0.001635881417641523,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.38310893512851896,
						"bleu_acc_stderr,none": 0.017018461679389855,
						"bleu_diff,none": 1.3638964471210027,
						"bleu_diff_stderr,none": 0.8632433983524191,
						"bleu_max,none": 23.17592314469333,
						"bleu_max_stderr,none": 0.7705199444430939,
						"rouge1_acc,none": 0.379436964504284,
						"rouge1_acc_stderr,none": 0.016987039266142985,
						"rouge1_diff,none": 1.5514230336845742,
						"rouge1_diff_stderr,none": 1.1718627062272666,
						"rouge1_max,none": 47.240232935223176,
						"rouge1_max_stderr,none": 0.9284398908010626,
						"rouge2_acc,none": 0.3011015911872705,
						"rouge2_acc_stderr,none": 0.016058999026100612,
						"rouge2_diff,none": 1.5445848533033304,
						"rouge2_diff_stderr,none": 1.2442679354094344,
						"rouge2_max,none": 32.053173111877065,
						"rouge2_max_stderr,none": 1.0561632909441534,
						"rougeL_acc,none": 0.379436964504284,
						"rougeL_acc_stderr,none": 0.01698703926614298,
						"rougeL_diff,none": 1.6657629586959137,
						"rougeL_diff_stderr,none": 1.1786314310856658,
						"rougeL_max,none": 44.731411115557066,
						"rougeL_max_stderr,none": 0.9472465952140484
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.38310893512851896,
						"bleu_acc_stderr,none": 0.017018461679389855,
						"bleu_diff,none": 1.3638964471210027,
						"bleu_diff_stderr,none": 0.8632433983524191,
						"bleu_max,none": 23.17592314469333,
						"bleu_max_stderr,none": 0.7705199444430939,
						"rouge1_acc,none": 0.379436964504284,
						"rouge1_acc_stderr,none": 0.016987039266142985,
						"rouge1_diff,none": 1.5514230336845742,
						"rouge1_diff_stderr,none": 1.1718627062272666,
						"rouge1_max,none": 47.240232935223176,
						"rouge1_max_stderr,none": 0.9284398908010626,
						"rouge2_acc,none": 0.3011015911872705,
						"rouge2_acc_stderr,none": 0.016058999026100612,
						"rouge2_diff,none": 1.5445848533033304,
						"rouge2_diff_stderr,none": 1.2442679354094344,
						"rouge2_max,none": 32.053173111877065,
						"rouge2_max_stderr,none": 1.0561632909441534,
						"rougeL_acc,none": 0.379436964504284,
						"rougeL_acc_stderr,none": 0.01698703926614298,
						"rougeL_diff,none": 1.6657629586959137,
						"rougeL_diff_stderr,none": 1.1786314310856658,
						"rougeL_max,none": 44.731411115557066,
						"rougeL_max_stderr,none": 0.9472465952140484
					},
					"truthfulqa_mc1": {
						"acc,none": 0.21542227662178703,
						"acc_stderr,none": 0.014391902652427681,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36688687757254074,
						"acc_stderr,none": 0.01397983317212303,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0718503937007874,
						"exact_match_stderr,none": 0.005730184515948714
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7559545336721784,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6887485533398678,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 16.477546700595973,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5935280189423836,
						"acc_stderr,none": 0.013804448697753375,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5492957746478874,
						"acc_stderr,none": 0.05947027187737999,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.38461538461538464,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.6959706959706959,
						"acc_stderr,none": 0.02789129939715294,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5592727272727273,
						"acc_stderr,none": 0.05449048831875534,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.468,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.672,
						"acc_stderr,none": 0.02101702716517549,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231333,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.02104961216613481,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.02185468495561126,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.37269076305220883,
						"acc_stderr,none": 0.03472708129682387,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3305220883534137,
						"acc_stderr,none": 0.009428789109289827,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3405622489959839,
						"acc_stderr,none": 0.009498886690274443,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.38795180722891565,
						"acc_stderr,none": 0.009767181346586388,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617626,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.45582329317269077,
						"acc_stderr,none": 0.009982878443738422,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.40843373493975904,
						"acc_stderr,none": 0.009852581919032238,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.40240963855421685,
						"acc_stderr,none": 0.009829321288467446,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.35180722891566263,
						"acc_stderr,none": 0.009571764897113621,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.39036144578313253,
						"acc_stderr,none": 0.009778161879954572,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.0095412515615684,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42088353413654617,
						"acc_stderr,none": 0.0098958129140522,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.00951233331947038,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4072289156626506,
						"acc_stderr,none": 0.009848052628967666,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177136,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5490042717044703,
						"acc_stderr,none": 0.058819532506748554,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4798146922567836,
						"acc_stderr,none": 0.01285663570649829,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7061548643282595,
						"acc_stderr,none": 0.011722513910985199,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.01275504628991222,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4983454665784249,
						"acc_stderr,none": 0.012867054869163346,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.49503639973527463,
						"acc_stderr,none": 0.012866491277589938,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6293845135671741,
						"acc_stderr,none": 0.012428861084065905,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5512905360688286,
						"acc_stderr,none": 0.012799246690109753,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5043017868960953,
						"acc_stderr,none": 0.012866649085718846,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.49503639973527463,
						"acc_stderr,none": 0.012866491277589943,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5195234943745863,
						"acc_stderr,none": 0.012857312531836864,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5943084050297816,
						"acc_stderr,none": 0.012636170220503926,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6781299168352439,
						"acc_stderr,none": 0.050686065221542786,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.7565591397849463,
						"acc_stderr,none": 0.008902256762767003,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5662650602409639,
						"acc_stderr,none": 0.05472870359742141,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5516162669447341,
						"acc_stderr,none": 0.016067958526765066,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.5855513307984791,
						"acc_stderr,none": 0.030434573161228058,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.546031746031746,
						"acc_stderr,none": 0.028096800277810523,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7063492063492064,
						"acc_stderr,none": 0.020306792341159757,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "aisingapore/sealion3b"
	},
	"aisingapore/sealion7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5591882750845547,
						"acc_norm,none": 0.5431228861330327,
						"acc_norm_stderr,none": 0.09362124783440262,
						"acc_stderr,none": 0.11387236254310973,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.017020251675974384,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0379,
						"acc_stderr,none": 0.03349546577024505,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8012835820895522,
						"acc_stderr,none": 0.17626151413085223,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789005,
						"acc_norm,none": 0.2555720653789005,
						"acc_norm_stderr,none": 0.11335554963306066,
						"acc_stderr,none": 0.11335554963306066,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2534104645139008,
						"acc_norm,none": 0.2534104645139008,
						"acc_norm_stderr,none": 0.04156927101542444,
						"acc_stderr,none": 0.04156927101542444,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.553890876565295,
						"likelihood_diff_stderr,none": 0.5524742956277262,
						"pct_stereotype,none": 0.5384615384615385,
						"pct_stereotype_stderr,none": 0.09667237572350902
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.0037257257477227076
					},
					"glue": {
						"acc,none": 0.4483236064792758,
						"acc_stderr,none": 0.002182704807270729,
						"alias": "glue",
						"f1,none": 0.41216462120993264,
						"f1_stderr,none": 0.0011659546595266493,
						"mcc,none": 0.005746920543176395,
						"mcc_stderr,none": 0.03167215385206039
					},
					"kmmlu": {
						"acc,none": 0.2977476176725383,
						"acc_norm,none": 0.2977476176725383,
						"acc_norm_stderr,none": 0.027565204887059408,
						"acc_stderr,none": 0.027565204887059408,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48717386538039903,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.0004994308617234501,
						"acc_stderr,none": 0.032661493746392774,
						"alias": "kobest",
						"f1,none": 0.37787480129001955,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6081894042305453,
						"acc_stderr,none": 0.016678153568244862,
						"alias": "lambada",
						"perplexity,none": 5.999183778951908,
						"perplexity_stderr,none": 0.3883438680093735
					},
					"lambada_cloze": {
						"acc,none": 0.05637492722685814,
						"acc_stderr,none": 0.0036339845908643863,
						"alias": "lambada_cloze",
						"perplexity,none": 410.5597435610774,
						"perplexity_stderr,none": 64.76038798613087
					},
					"lambada_multilingual": {
						"acc,none": 0.3563361148845332,
						"acc_stderr,none": 0.10880958042354162,
						"alias": "lambada_multilingual",
						"perplexity,none": 208.75737924701525,
						"perplexity_stderr,none": 86.70867592886836
					},
					"mmlu": {
						"acc,none": 0.27047429141147983,
						"acc_stderr,none": 0.0414814066692418,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.27460148777895854,
						"acc_stderr,none": 0.03138221628907931,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25973607981976177,
						"acc_stderr,none": 0.03792335098725098,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26714332141696456,
						"acc_stderr,none": 0.048526195180326866,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2781477957500793,
						"acc_stderr,none": 0.048696774228994315,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2807665010645848,
						"acc_norm,none": 0.2526954308110447,
						"acc_norm_stderr,none": 8.253874567293189e-05,
						"acc_stderr,none": 0.08897447239000647,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5021428571428571,
						"acc_stderr,none": 0.0348189958749277,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.6979504521700922,
						"acc_norm,none": 0.5494688984026932,
						"acc_norm_stderr,none": 0.010934720326948854,
						"acc_stderr,none": 0.16458027044509774,
						"alias": "pythia",
						"bits_per_byte,none": 0.7111912590639013,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6371553876002254,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.281544042643734,
						"perplexity_stderr,none": 0.12533612783107617,
						"word_perplexity,none": 13.958385271895754,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.055356432129099506,
						"acc_stderr,none": 0.04456070060427784,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5729260257562144,
						"acc_stderr,none": 0.032579735141498145,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2782728331670533,
						"acc_stderr,none": 0.001510473801712159,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811396,
						"bleu_diff,none": -9.624485667430166,
						"bleu_diff_stderr,none": 0.7856752160102788,
						"bleu_max,none": 22.931271947400514,
						"bleu_max_stderr,none": 0.7146900684034068,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.440333526932625,
						"rouge1_diff_stderr,none": 0.8699394714213237,
						"rouge1_max,none": 47.36490881253339,
						"rouge1_max_stderr,none": 0.8636030885388936,
						"rouge2_acc,none": 0.19216646266829865,
						"rouge2_acc_stderr,none": 0.013792870480628954,
						"rouge2_diff,none": -14.349105910615792,
						"rouge2_diff_stderr,none": 1.0130096165493152,
						"rouge2_max,none": 30.198128763581725,
						"rouge2_max_stderr,none": 0.970764485583704,
						"rougeL_acc,none": 0.2460220318237454,
						"rougeL_acc_stderr,none": 0.015077219200662583,
						"rougeL_diff,none": -11.883965318286142,
						"rougeL_diff_stderr,none": 0.8760767860621038,
						"rougeL_max,none": 44.718745125405206,
						"rougeL_max_stderr,none": 0.8693314562635015
					},
					"xcopa": {
						"acc,none": 0.5598181818181818,
						"acc_stderr,none": 0.06067967436589544,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3840160642570281,
						"acc_stderr,none": 0.04838130116000915,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5550809217255279,
						"acc_stderr,none": 0.05596046996282288,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7318498538997528,
						"acc_stderr,none": 0.06388725733791883,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5591882750845547,
						"acc_norm,none": 0.5431228861330327,
						"acc_norm_stderr,none": 0.09362124783440262,
						"acc_stderr,none": 0.11387236254310973,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.017020251675974384,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.329,
						"acc_stderr,none": 0.014865395385928367,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798594,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3541666666666667,
						"acc_stderr,none": 0.013811933499570958,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.318259385665529,
						"acc_norm,none": 0.3455631399317406,
						"acc_norm_stderr,none": 0.013896938461145677,
						"acc_stderr,none": 0.013611993916971451,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.678030303030303,
						"acc_norm,none": 0.6405723905723906,
						"acc_norm_stderr,none": 0.009845958893373759,
						"acc_stderr,none": 0.009587386696300382,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0379,
						"acc_stderr,none": 0.03349546577024505,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.069,
						"acc_stderr,none": 0.005668824197652671,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0875,
						"acc_stderr,none": 0.006319956164639147,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0925,
						"acc_stderr,none": 0.006480190694394497,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.107,
						"acc_stderr,none": 0.006913710993370312,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0165,
						"acc_stderr,none": 0.002849198828966353,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000022,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004772234273318872,
						"acc_stderr,none": 0.0014357568013434081,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8012835820895522,
						"acc_stderr,none": 0.17626151413085223,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103313,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099188,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.00314800093867677,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.01265543994336666,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.00891686663074592,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.01386041525752791,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.589,
						"acc_stderr,none": 0.015566673418599271,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307813,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357793,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298293,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306513,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163039,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406103,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666693,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662758,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286441,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274699,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968137,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543147,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342763,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183575,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799457,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178336,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.229,
						"acc_stderr,none": 0.013294199326613604,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.01279561361278655,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066536,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01404625563263392,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243761,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799464,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333345,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151108,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444235,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504382,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.294,
						"acc_stderr,none": 0.014414290540008215,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577804,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.015805341148131296,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113116,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264898,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653869,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651506,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732968,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697598,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469362,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812182,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.013334797216936431,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.374,
						"acc_stderr,none": 0.01530876736900636,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118585,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248085,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298102,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087948,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.432,
						"acc_stderr,none": 0.015672320237336203,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963527993,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333308,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652699,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475279,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724454,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696235,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122361,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.00779973306183202,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.00868051561552374,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756954,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.336,
						"acc_stderr,none": 0.014944140233795018,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.272,
						"acc_stderr,none": 0.014078856992462621,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6516819571865443,
						"acc_stderr,none": 0.008332942286688306,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.06672848092813057,
						"alias": "cb",
						"f1,none": 0.34309868875086263,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789005,
						"acc_norm,none": 0.2555720653789005,
						"acc_norm_stderr,none": 0.11335554963306066,
						"acc_stderr,none": 0.11335554963306066,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757575,
						"acc_stderr,none": 0.07575757575757575,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.05081972676135889,
						"acc_stderr,none": 0.05081972676135889,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518173,
						"acc_stderr,none": 0.05443310539518173,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502246,
						"acc_stderr,none": 0.07401656182502246,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.1008316903303367,
						"acc_stderr,none": 0.1008316903303367,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755133,
						"acc_stderr,none": 0.08780518530755133,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.43478260869565216,
						"acc_norm,none": 0.43478260869565216,
						"acc_norm_stderr,none": 0.10568965974008647,
						"acc_stderr,none": 0.10568965974008647,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.12126781251816651,
						"acc_stderr,none": 0.12126781251816651,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.07655305550699533,
						"acc_stderr,none": 0.07655305550699533,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.05881787629278457,
						"acc_stderr,none": 0.05881787629278457,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.06358669845936323,
						"acc_stderr,none": 0.06358669845936323,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2534104645139008,
						"acc_norm,none": 0.2534104645139008,
						"acc_norm_stderr,none": 0.04156927101542444,
						"acc_stderr,none": 0.04156927101542444,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548535,
						"acc_stderr,none": 0.03433919627548535,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03538668490313389,
						"acc_stderr,none": 0.03538668490313389,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810786,
						"acc_stderr,none": 0.03304756158810786,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.28708133971291866,
						"acc_norm,none": 0.28708133971291866,
						"acc_norm_stderr,none": 0.031368287214891676,
						"acc_stderr,none": 0.031368287214891676,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.038073871163060866,
						"acc_stderr,none": 0.038073871163060866,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.20588235294117646,
						"acc_norm,none": 0.20588235294117646,
						"acc_norm_stderr,none": 0.03480046931235067,
						"acc_stderr,none": 0.03480046931235067,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316699,
						"acc_stderr,none": 0.04485760883316699,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24458204334365324,
						"acc_norm,none": 0.24458204334365324,
						"acc_norm_stderr,none": 0.023953997540932172,
						"acc_stderr,none": 0.023953997540932172,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693244,
						"acc_stderr,none": 0.030778554678693244,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24581005586592178,
						"acc_norm,none": 0.24581005586592178,
						"acc_norm_stderr,none": 0.032272320235412995,
						"acc_stderr,none": 0.032272320235412995,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24472573839662448,
						"acc_norm,none": 0.24472573839662448,
						"acc_norm_stderr,none": 0.027985699387036423,
						"acc_stderr,none": 0.027985699387036423,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.16822429906542055,
						"acc_norm,none": 0.16822429906542055,
						"acc_norm_stderr,none": 0.036332438371418335,
						"acc_stderr,none": 0.036332438371418335,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.0430254877395901,
						"acc_stderr,none": 0.0430254877395901,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.3148148148148148,
						"acc_norm,none": 0.3148148148148148,
						"acc_norm_stderr,none": 0.04489931073591311,
						"acc_stderr,none": 0.04489931073591311,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845101,
						"acc_stderr,none": 0.04117581097845101,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.025546583236733533,
						"acc_stderr,none": 0.025546583236733533,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693268,
						"acc_stderr,none": 0.030778554678693268,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.031267817146631786,
						"acc_stderr,none": 0.031267817146631786,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.22302158273381295,
						"acc_norm,none": 0.22302158273381295,
						"acc_norm_stderr,none": 0.035435484995619396,
						"acc_stderr,none": 0.035435484995619396,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27672955974842767,
						"acc_norm,none": 0.27672955974842767,
						"acc_norm_stderr,none": 0.03559177035707934,
						"acc_stderr,none": 0.03559177035707934,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.033519538795212696,
						"acc_stderr,none": 0.033519538795212696,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.03384836428157859,
						"acc_stderr,none": 0.03384836428157859,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.21428571428571427,
						"acc_norm,none": 0.21428571428571427,
						"acc_norm_stderr,none": 0.025899541362425026,
						"acc_stderr,none": 0.025899541362425026,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03173071239071724,
						"acc_stderr,none": 0.03173071239071724,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.02865749128507199,
						"acc_stderr,none": 0.02865749128507199,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.029017133559381264,
						"acc_stderr,none": 0.029017133559381264,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.035914440841969694,
						"acc_stderr,none": 0.035914440841969694,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.037068604626235575,
						"acc_stderr,none": 0.037068604626235575,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.03551344041697433,
						"acc_stderr,none": 0.03551344041697433,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.032209657045145244,
						"acc_stderr,none": 0.032209657045145244,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.0374425492857706,
						"acc_stderr,none": 0.0374425492857706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.19491525423728814,
						"acc_norm,none": 0.19491525423728814,
						"acc_norm_stderr,none": 0.0366227356760915,
						"acc_stderr,none": 0.0366227356760915,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940588,
						"acc_stderr,none": 0.04265792110940588,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695624,
						"acc_stderr,none": 0.03737392962695624,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.04006168083848877,
						"acc_stderr,none": 0.04006168083848877,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.034724693044776,
						"acc_stderr,none": 0.034724693044776,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26277372262773724,
						"acc_norm,none": 0.26277372262773724,
						"acc_norm_stderr,none": 0.021736991810864862,
						"acc_stderr,none": 0.021736991810864862,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.26635514018691586,
						"acc_norm,none": 0.26635514018691586,
						"acc_norm_stderr,none": 0.030288912386133217,
						"acc_stderr,none": 0.030288912386133217,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2845528455284553,
						"acc_norm,none": 0.2845528455284553,
						"acc_norm_stderr,none": 0.04084983733239222,
						"acc_stderr,none": 0.04084983733239222,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.319672131147541,
						"acc_norm,none": 0.319672131147541,
						"acc_norm_stderr,none": 0.04239540943837381,
						"acc_stderr,none": 0.04239540943837381,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.03023199042074987,
						"acc_stderr,none": 0.03023199042074987,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.031073844843659416,
						"acc_stderr,none": 0.031073844843659416,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03294754314388875,
						"acc_stderr,none": 0.03294754314388875,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.037245636197746325,
						"acc_stderr,none": 0.037245636197746325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.0433629090391994,
						"acc_stderr,none": 0.0433629090391994,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.03444952656229018,
						"acc_stderr,none": 0.03444952656229018,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293152,
						"acc_stderr,none": 0.030641194076293152,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2154255319148936,
						"acc_norm,none": 0.2154255319148936,
						"acc_norm_stderr,none": 0.021230002173909638,
						"acc_stderr,none": 0.021230002173909638,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.02832451468417116,
						"acc_stderr,none": 0.02832451468417116,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03253413848482255,
						"acc_stderr,none": 0.03253413848482255,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.034554737023254366,
						"acc_stderr,none": 0.034554737023254366,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.02911849599823728,
						"acc_stderr,none": 0.02911849599823728,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.033330686633366996,
						"acc_stderr,none": 0.033330686633366996,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.22981366459627328,
						"acc_norm,none": 0.22981366459627328,
						"acc_norm_stderr,none": 0.0332602751192305,
						"acc_stderr,none": 0.0332602751192305,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.005746920543176395,
						"mcc_stderr,none": 0.03167215385206039
					},
					"copa": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.04093601807403326,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.553890876565295,
						"likelihood_diff_stderr,none": 0.5524742956277262,
						"pct_stereotype,none": 0.5384615384615385,
						"pct_stereotype_stderr,none": 0.09667237572350902
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.35494931425164,
						"likelihood_diff_stderr,none": 0.08319062358930267,
						"pct_stereotype,none": 0.614788312462731,
						"pct_stereotype_stderr,none": 0.011887089206792469
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.6373626373626373,
						"likelihood_diff_stderr,none": 0.3742720597764413,
						"pct_stereotype,none": 0.6263736263736264,
						"pct_stereotype_stderr,none": 0.0509934316638677
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.409090909090909,
						"likelihood_diff_stderr,none": 2.093353314463078,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.0346153846153845,
						"likelihood_diff_stderr,none": 0.647152530651773,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.45078125,
						"likelihood_diff_stderr,none": 0.16065612934292192,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.026812710310024235
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.2511574074074074,
						"likelihood_diff_stderr,none": 0.21273134089496454,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.03388857118502326
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.2552083333333335,
						"likelihood_diff_stderr,none": 0.2813443970867464,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.109498031496063,
						"likelihood_diff_stderr,none": 0.13999457038199015,
						"pct_stereotype,none": 0.5039370078740157,
						"pct_stereotype_stderr,none": 0.02220509119300217
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5281531531531534,
						"likelihood_diff_stderr,none": 0.3289057063488387,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.042343213610845386
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.153225806451613,
						"likelihood_diff_stderr,none": 0.42139438444186456,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.026973684210526,
						"likelihood_diff_stderr,none": 0.24108490948272931,
						"pct_stereotype,none": 0.6473684210526316,
						"pct_stereotype_stderr,none": 0.03475405259582096
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.7528324388789507,
						"likelihood_diff_stderr,none": 0.09443445324613586,
						"pct_stereotype,none": 0.46213476446034585,
						"pct_stereotype_stderr,none": 0.012178226587918596
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.1305555555555555,
						"likelihood_diff_stderr,none": 0.7027259631632007,
						"pct_stereotype,none": 0.4222222222222222,
						"pct_stereotype_stderr,none": 0.05235473399540657
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.2884615384615383,
						"likelihood_diff_stderr,none": 0.8705685190443571,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.223484848484849,
						"likelihood_diff_stderr,none": 0.5051924091632697,
						"pct_stereotype,none": 0.5303030303030303,
						"pct_stereotype_stderr,none": 0.06190336468479955
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0257009345794392,
						"likelihood_diff_stderr,none": 0.16586115363972057,
						"pct_stereotype,none": 0.5264797507788161,
						"pct_stereotype_stderr,none": 0.02791162519893664
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.862648221343873,
						"likelihood_diff_stderr,none": 0.242961639626642,
						"pct_stereotype,none": 0.28063241106719367,
						"pct_stereotype_stderr,none": 0.02830375633589041
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.027777777777778,
						"likelihood_diff_stderr,none": 0.4719114322947124,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.05897165471491952
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0266304347826085,
						"likelihood_diff_stderr,none": 0.1509416980711582,
						"pct_stereotype,none": 0.3978260869565217,
						"pct_stereotype_stderr,none": 0.02284553090038966
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.6456521739130436,
						"likelihood_diff_stderr,none": 0.36095927220775226,
						"pct_stereotype,none": 0.5826086956521739,
						"pct_stereotype_stderr,none": 0.04618572379512261
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 5.368131868131868,
						"likelihood_diff_stderr,none": 0.424457217487365,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.04441155916843278
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7895408163265305,
						"likelihood_diff_stderr,none": 0.28238124850994123,
						"pct_stereotype,none": 0.5357142857142857,
						"pct_stereotype_stderr,none": 0.03571428571428571
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.0037257257477227076
					},
					"glue": {
						"acc,none": 0.4483236064792758,
						"acc_stderr,none": 0.002182704807270729,
						"alias": "glue",
						"f1,none": 0.41216462120993264,
						"f1_stderr,none": 0.0011659546595266493,
						"mcc,none": 0.005746920543176395,
						"mcc_stderr,none": 0.03167215385206039
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.02047005307050796,
						"exact_match_stderr,get-answer": 0.0039004133859157192
					},
					"hellaswag": {
						"acc,none": 0.5014937263493328,
						"acc_norm,none": 0.6699860585540729,
						"acc_norm_stderr,none": 0.0046925676559617735,
						"acc_stderr,none": 0.004989759144812295,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2977476176725383,
						"acc_norm,none": 0.2977476176725383,
						"acc_norm_stderr,none": 0.027565204887059408,
						"acc_stderr,none": 0.027565204887059408,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.01438551156347734,
						"acc_stderr,none": 0.01438551156347734,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087966,
						"acc_stderr,none": 0.014683991951087966,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.01386041525752791,
						"acc_stderr,none": 0.01386041525752791,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633915,
						"acc_stderr,none": 0.014046255632633915,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.2816666666666667,
						"acc_norm,none": 0.2816666666666667,
						"acc_norm_stderr,none": 0.01837880736590156,
						"acc_stderr,none": 0.01837880736590156,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795021,
						"acc_stderr,none": 0.014944140233795021,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535262,
						"acc_stderr,none": 0.014539683710535262,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996683,
						"acc_stderr,none": 0.014782913600996683,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.031093957143700265,
						"acc_stderr,none": 0.031093957143700265,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.326,
						"acc_norm,none": 0.326,
						"acc_norm_stderr,none": 0.014830507204541042,
						"acc_stderr,none": 0.014830507204541042,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2153846153846154,
						"acc_norm,none": 0.2153846153846154,
						"acc_norm_stderr,none": 0.03619435936612662,
						"acc_stderr,none": 0.03619435936612662,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795018,
						"acc_stderr,none": 0.014944140233795018,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.01480686473373886,
						"acc_stderr,none": 0.01480686473373886,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809961,
						"acc_stderr,none": 0.014312087053809961,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.014696631960792505,
						"acc_stderr,none": 0.014696631960792505,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445505,
						"acc_stderr,none": 0.014428554438445505,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091494,
						"acc_stderr,none": 0.014205696104091494,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.01451239503354316,
						"acc_stderr,none": 0.01451239503354316,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.318,
						"acc_norm,none": 0.318,
						"acc_norm_stderr,none": 0.014734079309311901,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220461,
						"acc_stderr,none": 0.014484778521220461,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.337,
						"acc_norm,none": 0.337,
						"acc_norm_stderr,none": 0.0149550879186536,
						"acc_stderr,none": 0.0149550879186536,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462611,
						"acc_stderr,none": 0.014078856992462611,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722695,
						"acc_stderr,none": 0.014645596385722695,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.30666666666666664,
						"acc_norm,none": 0.30666666666666664,
						"acc_norm_stderr,none": 0.018840434540100308,
						"acc_stderr,none": 0.018840434540100308,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206486,
						"acc_stderr,none": 0.014619600977206486,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434937,
						"acc_stderr,none": 0.014221154708434937,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.014818724459095524,
						"acc_stderr,none": 0.014818724459095524,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.01414298497574067,
						"acc_stderr,none": 0.01414298497574067,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.04648231987117316,
						"acc_stderr,none": 0.04648231987117316,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.2733333333333333,
						"acc_norm,none": 0.2733333333333333,
						"acc_norm_stderr,none": 0.025773792282785975,
						"acc_stderr,none": 0.025773792282785975,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702305,
						"acc_stderr,none": 0.013681600278702305,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.317,
						"acc_norm,none": 0.317,
						"acc_norm_stderr,none": 0.014721675438880224,
						"acc_stderr,none": 0.014721675438880224,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200482,
						"acc_stderr,none": 0.014282120955200482,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.032166339033750324,
						"acc_stderr,none": 0.032166339033750324,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445516,
						"acc_stderr,none": 0.014428554438445516,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145156,
						"acc_stderr,none": 0.013979965645145156,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.031093957143700265,
						"acc_stderr,none": 0.031093957143700265,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.014758652303574897,
						"acc_stderr,none": 0.014758652303574897,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48717386538039903,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.0004994308617234501,
						"acc_stderr,none": 0.032661493746392774,
						"alias": "kobest",
						"f1,none": 0.37787480129001955,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5028490028490028,
						"acc_stderr,none": 0.013348550797680823,
						"alias": " - kobest_boolq",
						"f1,none": 0.3371320037986705,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.015816736995005392,
						"alias": " - kobest_copa",
						"f1,none": 0.5080904592591702,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.382,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.0223479498326681,
						"acc_stderr,none": 0.021750820591250827,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.37688247049428225,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5062972292191436,
						"acc_stderr,none": 0.02512395255890725,
						"alias": " - kobest_sentineg",
						"f1,none": 0.35350614822200066,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6081894042305453,
						"acc_stderr,none": 0.016678153568244862,
						"alias": "lambada",
						"perplexity,none": 5.999183778951908,
						"perplexity_stderr,none": 0.3883438680093735
					},
					"lambada_cloze": {
						"acc,none": 0.05637492722685814,
						"acc_stderr,none": 0.0036339845908643863,
						"alias": "lambada_cloze",
						"perplexity,none": 410.5597435610774,
						"perplexity_stderr,none": 64.76038798613087
					},
					"lambada_multilingual": {
						"acc,none": 0.3563361148845332,
						"acc_stderr,none": 0.10880958042354162,
						"alias": "lambada_multilingual",
						"perplexity,none": 208.75737924701525,
						"perplexity_stderr,none": 86.70867592886836
					},
					"lambada_openai": {
						"acc,none": 0.6376867843974384,
						"acc_stderr,none": 0.006696654128325645,
						"alias": " - lambada_openai",
						"perplexity,none": 5.281544042643734,
						"perplexity_stderr,none": 0.12533612783107617
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.05977100718028333,
						"acc_stderr,none": 0.0033027384259778528,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 285.11164230457746,
						"perplexity_stderr,none": 10.284162195397057
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.238889967009509,
						"acc_stderr,none": 0.005940660742100604,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 319.38039610555927,
						"perplexity_stderr,none": 20.169289012574698
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6372986609741897,
						"acc_stderr,none": 0.0066982006844884595,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.276730670132765,
						"perplexity_stderr,none": 0.1255002220212709
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2844944692412187,
						"acc_stderr,none": 0.0062857265569445,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 258.0827067880811,
						"perplexity_stderr,none": 15.792830406360725
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.3341742674170386,
						"acc_stderr,none": 0.006571717150557816,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 149.98921351585147,
						"perplexity_stderr,none": 8.88738019371729
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.28682320978071024,
						"acc_stderr,none": 0.0063011209953543045,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 311.05784915545155,
						"perplexity_stderr,none": 20.29376169098645
					},
					"lambada_standard": {
						"acc,none": 0.5777217155055308,
						"acc_stderr,none": 0.006881304773376882,
						"alias": " - lambada_standard",
						"perplexity,none": 6.715474411140353,
						"perplexity_stderr,none": 0.17133813144217366
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.05297884727343295,
						"acc_stderr,none": 0.0031206363637928084,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 536.0078448175774,
						"perplexity_stderr,none": 20.31564421177171
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2684478371501272,
						"exact_match_stderr,get-answer": 0.01118058458209665
					},
					"logiqa": {
						"acc,none": 0.22119815668202766,
						"acc_norm,none": 0.29493087557603687,
						"acc_norm_stderr,none": 0.0178862497341044,
						"acc_stderr,none": 0.016279743532401657,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.21882951653944022,
						"acc_norm,none": 0.2639949109414758,
						"acc_norm_stderr,none": 0.011121160118426503,
						"acc_stderr,none": 0.010431284021341816,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2371859296482412,
						"acc_norm,none": 0.23584589614740367,
						"acc_norm_stderr,none": 0.0077715067283657385,
						"acc_stderr,none": 0.007786717148416355,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3543740732895573,
						"acc_stderr,none": 0.0049228029544205396,
						"alias": "mc_taco",
						"f1,none": 0.5073541296266365,
						"f1_stderr,none": 0.00549819374119088
					},
					"medmcqa": {
						"acc,none": 0.25364570882142,
						"acc_norm,none": 0.25364570882142,
						"acc_norm_stderr,none": 0.006728128268656896,
						"acc_stderr,none": 0.006728128268656896,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24901806755695208,
						"acc_norm,none": 0.24901806755695208,
						"acc_norm_stderr,none": 0.012125135984037815,
						"acc_stderr,none": 0.012125135984037815,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.27047429141147983,
						"acc_stderr,none": 0.0414814066692418,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.040943762699967926,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.0378272898086547,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036844,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2792452830188679,
						"acc_stderr,none": 0.027611163402399715,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.034765996075164785,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.04440521906179328,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2170212765957447,
						"acc_stderr,none": 0.026947483121496238,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.31724137931034485,
						"acc_stderr,none": 0.03878352372138622,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.02293097307163335,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952365,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2806451612903226,
						"acc_stderr,none": 0.025560604721022884,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.35467980295566504,
						"acc_stderr,none": 0.03366124489051449,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.28484848484848485,
						"acc_stderr,none": 0.035243908445117836,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.02962022787479048,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21243523316062177,
						"acc_stderr,none": 0.02951928261681725,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24102564102564103,
						"acc_stderr,none": 0.02168554666533319,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.027309140588230172,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23949579831932774,
						"acc_stderr,none": 0.02772206549336127,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526733,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23853211009174313,
						"acc_stderr,none": 0.018272575810231867,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.029157522184605603,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.03019028245350195,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.27848101265822783,
						"acc_stderr,none": 0.029178682304842548,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.25112107623318386,
						"acc_stderr,none": 0.029105220833224615,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.32061068702290074,
						"acc_stderr,none": 0.04093329229834278,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.27460148777895854,
						"acc_stderr,none": 0.03138221628907931,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3140495867768595,
						"acc_stderr,none": 0.04236964753041017,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.04524596007030048,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.294478527607362,
						"acc_stderr,none": 0.03581165790474082,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467764,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2621359223300971,
						"acc_stderr,none": 0.04354631077260595,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.24786324786324787,
						"acc_stderr,none": 0.028286324075564407,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.25798212005108556,
						"acc_stderr,none": 0.01564583018834895,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.024105712607754307,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2569832402234637,
						"acc_stderr,none": 0.014614465821966344,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.026336613469046644,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25973607981976177,
						"acc_stderr,none": 0.03792335098725098,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3247588424437299,
						"acc_stderr,none": 0.026596782287697046,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.025407197798890162,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.22695035460992907,
						"acc_stderr,none": 0.024987106365642976,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.26401564537157757,
						"acc_stderr,none": 0.011258435537723805,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.024562204314142314,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2826797385620915,
						"acc_stderr,none": 0.018217269552053442,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.20909090909090908,
						"acc_stderr,none": 0.03895091015724138,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.40816326530612246,
						"acc_stderr,none": 0.03146465712827424,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.26714332141696456,
						"acc_stderr,none": 0.048526195180326866,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2835820895522388,
						"acc_stderr,none": 0.03187187537919797,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2781477957500793,
						"acc_stderr,none": 0.048696774228994315,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.03550920185689629,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.23391812865497075,
						"acc_stderr,none": 0.03246721765117826,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.36566479877738156,
						"acc_stderr,none": 0.004861585819619509,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36960943856794143,
						"acc_stderr,none": 0.004868302203036299,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6421568627450981,
						"acc_stderr,none": 0.02376127309720949,
						"alias": "mrpc",
						"f1,none": 0.7794561933534743,
						"f1_stderr,none": 0.017779537252399034
					},
					"multimedqa": {
						"acc,none": 0.2807665010645848,
						"acc_norm,none": 0.2526954308110447,
						"acc_norm_stderr,none": 8.253874567293189e-05,
						"acc_stderr,none": 0.08897447239000647,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5713696369636964,
						"acc_stderr,none": 0.007108263771672476,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6889578630549283,
						"mrr_stderr,none": 0.01031837871395473,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.42663656884875845,
						"r@2_stderr,none": 0.016625411323052963
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6356282939558373,
						"mrr_stderr,none": 0.010437632898951917,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.01675774147880103
					},
					"openbookqa": {
						"acc,none": 0.236,
						"acc_norm,none": 0.374,
						"acc_norm_stderr,none": 0.021660710347204484,
						"acc_stderr,none": 0.01900869962208472,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.495,
						"acc_stderr,none": 0.011182576850283838,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.419,
						"acc_stderr,none": 0.011035415270622932,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.011165587094621543,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5315,
						"acc_stderr,none": 0.011160921022883276,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.011145474902641256,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.0111357084193598,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5105,
						"acc_stderr,none": 0.011180669867648658,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5021428571428571,
						"acc_stderr,none": 0.0348189958749277,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7600652883569097,
						"acc_norm,none": 0.764417845484222,
						"acc_norm_stderr,none": 0.009901067586473904,
						"acc_stderr,none": 0.009963625892809545,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2933390264730999,
						"acc_norm,none": 0.2685204953031597,
						"acc_norm_stderr,none": 0.0032378964241547518,
						"acc_stderr,none": 0.0033263169445066306,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.021793529219281172,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.6979504521700922,
						"acc_norm,none": 0.5494688984026932,
						"acc_norm_stderr,none": 0.010934720326948854,
						"acc_stderr,none": 0.16458027044509774,
						"alias": "pythia",
						"bits_per_byte,none": 0.7111912590639013,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6371553876002254,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.281544042643734,
						"perplexity_stderr,none": 0.12533612783107617,
						"word_perplexity,none": 13.958385271895754,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39361702127659576,
						"acc_norm,none": 0.4432624113475177,
						"acc_norm_stderr,none": 0.055356432129099506,
						"acc_stderr,none": 0.04456070060427784,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.04567549854280213,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.36875,
						"acc_norm,none": 0.44375,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.03826204233503226,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.029138375022747656,
						"acc_stderr,none": 0.028857363751758305,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5083287570931723,
						"acc_stderr,none": 0.0067644718780247755,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.47590897848132574,
						"acc_stderr,none": 0.0024838125052295467,
						"alias": "qqp",
						"f1,none": 0.40999081112689,
						"f1_stderr,none": 0.0032656068543867327
					},
					"race": {
						"acc,none": 0.3799043062200957,
						"acc_stderr,none": 0.015021600804935645,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.4548736462093863,
						"acc_stderr,none": 0.029973636495415252,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.918,
						"acc_norm,none": 0.866,
						"acc_norm_stderr,none": 0.01077776229836968,
						"acc_stderr,none": 0.008680515615523722,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.4548736462093863,
						"acc_stderr,none": 0.029973636495415252,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5321100917431193,
						"acc_stderr,none": 0.016906881526426512,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5217434769569129,
						"acc_norm,none": 0.7139858042587224,
						"acc_norm_stderr,none": 0.003194988543147053,
						"acc_stderr,none": 0.003531747832137498,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5729260257562144,
						"acc_stderr,none": 0.032579735141498145,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5073116987179487,
						"acc_stderr,none": 0.005003720333450553,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6277490625316712,
						"acc_stderr,none": 0.004866765053938566,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5841176470588235,
						"acc_stderr,none": 0.004880413433633874,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2782728331670533,
						"acc_stderr,none": 0.001510473801712159,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811396,
						"bleu_diff,none": -9.624485667430166,
						"bleu_diff_stderr,none": 0.7856752160102788,
						"bleu_max,none": 22.931271947400514,
						"bleu_max_stderr,none": 0.7146900684034068,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.440333526932625,
						"rouge1_diff_stderr,none": 0.8699394714213237,
						"rouge1_max,none": 47.36490881253339,
						"rouge1_max_stderr,none": 0.8636030885388936,
						"rouge2_acc,none": 0.19216646266829865,
						"rouge2_acc_stderr,none": 0.013792870480628954,
						"rouge2_diff,none": -14.349105910615792,
						"rouge2_diff_stderr,none": 1.0130096165493152,
						"rouge2_max,none": 30.198128763581725,
						"rouge2_max_stderr,none": 0.970764485583704,
						"rougeL_acc,none": 0.2460220318237454,
						"rougeL_acc_stderr,none": 0.015077219200662583,
						"rougeL_diff,none": -11.883965318286142,
						"rougeL_diff_stderr,none": 0.8760767860621038,
						"rougeL_max,none": 44.718745125405206,
						"rougeL_max_stderr,none": 0.8693314562635015
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811396,
						"bleu_diff,none": -9.624485667430166,
						"bleu_diff_stderr,none": 0.7856752160102788,
						"bleu_max,none": 22.931271947400514,
						"bleu_max_stderr,none": 0.7146900684034068,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928336,
						"rouge1_diff,none": -11.440333526932625,
						"rouge1_diff_stderr,none": 0.8699394714213237,
						"rouge1_max,none": 47.36490881253339,
						"rouge1_max_stderr,none": 0.8636030885388936,
						"rouge2_acc,none": 0.19216646266829865,
						"rouge2_acc_stderr,none": 0.013792870480628954,
						"rouge2_diff,none": -14.349105910615792,
						"rouge2_diff_stderr,none": 1.0130096165493152,
						"rouge2_max,none": 30.198128763581725,
						"rouge2_max_stderr,none": 0.970764485583704,
						"rougeL_acc,none": 0.2460220318237454,
						"rougeL_acc_stderr,none": 0.015077219200662583,
						"rougeL_diff,none": -11.883965318286142,
						"rougeL_diff_stderr,none": 0.8760767860621038,
						"rougeL_max,none": 44.718745125405206,
						"rougeL_max_stderr,none": 0.8693314562635015
					},
					"truthfulqa_mc1": {
						"acc,none": 0.20563035495716034,
						"acc_stderr,none": 0.014148482219460978,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35091531137694626,
						"acc_stderr,none": 0.013450683705227093,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.003725725747722712
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7111912590639013,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6371553876002254,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 13.958385271895754,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6053670086819258,
						"acc_stderr,none": 0.013736915172371883,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5352112676056338,
						"acc_stderr,none": 0.05961305784972239,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7728937728937729,
						"acc_stderr,none": 0.02540329042459516,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5598181818181818,
						"acc_stderr,none": 0.06067967436589544,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.02120611701367307,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353196,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920787,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.020435342091896135,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.021854684955611266,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3840160642570281,
						"acc_stderr,none": 0.04838130116000915,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3236947791164659,
						"acc_stderr,none": 0.009378357180373085,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3542168674698795,
						"acc_stderr,none": 0.009586620142951844,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994484,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617616,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.521285140562249,
						"acc_stderr,none": 0.010012987604500442,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.41646586345381525,
						"acc_stderr,none": 0.009881215932115986,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.43172690763052207,
						"acc_stderr,none": 0.009928203186112919,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3393574297188755,
						"acc_stderr,none": 0.009490727635646758,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.38393574297188754,
						"acc_stderr,none": 0.009748321202534391,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3289156626506024,
						"acc_stderr,none": 0.009417125981806735,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.009924844537285534,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.009620250217765998,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358228,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4389558232931727,
						"acc_stderr,none": 0.009947100105978386,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361543,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5550809217255279,
						"acc_stderr,none": 0.05596046996282288,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4877564526803441,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7313037723362011,
						"acc_stderr,none": 0.011407519447092172,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5651886168100596,
						"acc_stderr,none": 0.012757297463352968,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4983454665784249,
						"acc_stderr,none": 0.012867054869163341,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.499669093315685,
						"acc_stderr,none": 0.012867122498493417,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6353408338848445,
						"acc_stderr,none": 0.012386781532906167,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.01279568816738529,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5208471211118465,
						"acc_stderr,none": 0.012855936282881265,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4923891462607545,
						"acc_stderr,none": 0.01286563457111448,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5228325612177366,
						"acc_stderr,none": 0.012853702384870849,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.599602911978822,
						"acc_stderr,none": 0.01260923817555117,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7318498538997528,
						"acc_stderr,none": 0.06388725733791883,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8361290322580646,
						"acc_stderr,none": 0.007678379958837628,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5662650602409639,
						"acc_stderr,none": 0.05472870359742141,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5672575599582899,
						"acc_stderr,none": 0.016007449356284165,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6045627376425855,
						"acc_stderr,none": 0.030207086392235353,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5746031746031746,
						"acc_stderr,none": 0.02790077769497624,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7559523809523809,
						"acc_stderr,none": 0.01915139944664687,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "aisingapore/sealion7b"
	},
	"allenai/OLMo-7B": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6138669673055243,
						"acc_norm,none": 0.5941375422773394,
						"acc_norm_stderr,none": 0.09114464840453876,
						"acc_stderr,none": 0.11629284496368797,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.017229187207023548,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00685,
						"acc_stderr,none": 0.008348131833090362,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8324626865671642,
						"acc_stderr,none": 0.1497512374407285,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2526002971768202,
						"acc_norm,none": 0.2526002971768202,
						"acc_norm_stderr,none": 0.11248875724999531,
						"acc_stderr,none": 0.11248875724999531,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25004317043688495,
						"acc_norm,none": 0.25004317043688495,
						"acc_norm_stderr,none": 0.043083884282466484,
						"acc_stderr,none": 0.043083884282466484,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4772659511031603,
						"likelihood_diff_stderr,none": 0.49088835451046214,
						"pct_stereotype,none": 0.5608228980322003,
						"pct_stereotype_stderr,none": 0.09848087406351029
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.0037257257477226868
					},
					"glue": {
						"acc,none": 0.47397570271557893,
						"acc_stderr,none": 0.006104309039981269,
						"alias": "glue",
						"f1,none": 0.4450259440671591,
						"f1_stderr,none": 0.0011613802806875427,
						"mcc,none": 0.003737743780434562,
						"mcc_stderr,none": 0.031103768987297463
					},
					"kmmlu": {
						"acc,none": 0.26988160554432566,
						"acc_norm,none": 0.26988160554432566,
						"acc_norm_stderr,none": 0.02356571648698782,
						"acc_stderr,none": 0.02356571648698782,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48541986406489807,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.0366339172650086,
						"alias": "kobest",
						"f1,none": 0.39393512492621,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6634969920434698,
						"acc_stderr,none": 0.013626250353494346,
						"alias": "lambada",
						"perplexity,none": 4.581309477921781,
						"perplexity_stderr,none": 0.24450204428335026
					},
					"lambada_cloze": {
						"acc,none": 0.09441102270522025,
						"acc_stderr,none": 0.008464918604058229,
						"alias": "lambada_cloze",
						"perplexity,none": 202.80427845102082,
						"perplexity_stderr,none": 6.42809109784278
					},
					"lambada_multilingual": {
						"acc,none": 0.41979429458567824,
						"acc_stderr,none": 0.0786758552445782,
						"alias": "lambada_multilingual",
						"perplexity,none": 69.87405628148183,
						"perplexity_stderr,none": 21.087915987921274
					},
					"mmlu": {
						"acc,none": 0.2812277453354224,
						"acc_stderr,none": 0.04207303520666168,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2741764080765144,
						"acc_stderr,none": 0.039627659898151486,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.28644995172191834,
						"acc_stderr,none": 0.042803745332077855,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.28501787455313626,
						"acc_stderr,none": 0.035416759074404516,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.04961588475618063,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2828956706884315,
						"acc_norm,none": 0.24806941129661894,
						"acc_norm_stderr,none": 8.141668564682874e-05,
						"acc_stderr,none": 0.10602985794277944,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.48028571428571426,
						"acc_stderr,none": 0.044735701540947824,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7279173015692755,
						"acc_norm,none": 0.5992547792399183,
						"acc_norm_stderr,none": 0.01044896121196604,
						"acc_stderr,none": 0.14377926620120451,
						"alias": "pythia",
						"bits_per_byte,none": 0.6487896109299826,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5678522506594048,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.133552279354633,
						"perplexity_stderr,none": 0.08746793214630093,
						"word_perplexity,none": 11.076041195986809,
						"word_perplexity_stderr,none": "N/A"
					},
					"sycophancy": {
						"acc,none": 0.57458986389804,
						"acc_stderr,none": 0.028184359004039327,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.30152155175145756,
						"acc_stderr,none": 0.00101315922710758,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713647,
						"bleu_diff,none": -9.095219004961898,
						"bleu_diff_stderr,none": 0.7804871290932788,
						"bleu_max,none": 26.594375993138968,
						"bleu_max_stderr,none": 0.7778671069690596,
						"rouge1_acc,none": 0.31701346389228885,
						"rouge1_acc_stderr,none": 0.016289203374403358,
						"rouge1_diff,none": -11.175921692345224,
						"rouge1_diff_stderr,none": 0.8195761806099192,
						"rouge1_max,none": 52.11636563362932,
						"rouge1_max_stderr,none": 0.829198117947217,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766373,
						"rouge2_diff,none": -13.49748922413662,
						"rouge2_diff_stderr,none": 1.0086880301697758,
						"rouge2_max,none": 36.46886632756582,
						"rouge2_max_stderr,none": 0.9690264435785303,
						"rougeL_acc,none": 0.3023255813953488,
						"rougeL_acc_stderr,none": 0.016077509266133033,
						"rougeL_diff,none": -11.330097263561317,
						"rougeL_diff_stderr,none": 0.8162709159821883,
						"rougeL_max,none": 49.2588863909637,
						"rougeL_max_stderr,none": 0.8341363234633794
					},
					"xcopa": {
						"acc,none": 0.5378181818181818,
						"acc_stderr,none": 0.03414712992987977,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.39435073627844713,
						"acc_stderr,none": 0.05742628184405443,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5533361410264123,
						"acc_stderr,none": 0.07308704147277036,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7473589570690042,
						"acc_stderr,none": 0.0647846885953495,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6138669673055243,
						"acc_norm,none": 0.5941375422773394,
						"acc_norm_stderr,none": 0.09114464840453876,
						"acc_stderr,none": 0.11629284496368797,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.017229187207023548,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.328,
						"acc_stderr,none": 0.014853842487270334,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.356,
						"acc_stderr,none": 0.015149042659306626,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3575,
						"acc_stderr,none": 0.0138409212452578,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3677474402730375,
						"acc_norm,none": 0.40187713310580203,
						"acc_norm_stderr,none": 0.014327268614578274,
						"acc_stderr,none": 0.01409099561816847,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7352693602693603,
						"acc_norm,none": 0.688973063973064,
						"acc_norm_stderr,none": 0.009498790639757611,
						"acc_stderr,none": 0.009053021086173967,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00685,
						"acc_stderr,none": 0.008348131833090362,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.015,
						"acc_stderr,none": 0.0027186753387999584,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.029,
						"acc_stderr,none": 0.003753204400460514,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0155,
						"acc_stderr,none": 0.002762913651550316,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521572,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521436,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000152,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.016052060737527116,
						"acc_stderr,none": 0.002618244621382576,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8324626865671642,
						"acc_stderr,none": 0.1497512374407285,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787726,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448786,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329816,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264352,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651538,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895038,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936531,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.01218143617917791,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037183,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319417,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318205,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557423,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406148,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817135,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936684,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323488,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275291,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024064,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.707,
						"acc_stderr,none": 0.014399942998441266,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816322,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571417,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472992,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689066,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.383,
						"acc_stderr,none": 0.01538010232565271,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511961,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.01472167543888022,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843986,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306486,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315113,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706617,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592081,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.735,
						"acc_stderr,none": 0.013963164754809953,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656799,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481742,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602496,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734944,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304408,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440472,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.372,
						"acc_stderr,none": 0.015292149942040577,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679195,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697587,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577952,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.567,
						"acc_stderr,none": 0.01567663091218133,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037183,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.014899597242811483,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.856,
						"acc_stderr,none": 0.011107987548939149,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357791,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408033,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271294,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315174,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727068,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389631,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224489,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.255,
						"acc_stderr,none": 0.013790038620872842,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7262996941896025,
						"acc_stderr,none": 0.0077980876386284275,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.125,
						"acc_stderr,none": 0.04459412925079224,
						"alias": "cb",
						"f1,none": 0.11129975476325221,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2526002971768202,
						"acc_norm,none": 0.2526002971768202,
						"acc_norm_stderr,none": 0.11248875724999531,
						"acc_stderr,none": 0.11248875724999531,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.062069005411206336,
						"acc_stderr,none": 0.062069005411206336,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222173,
						"acc_stderr,none": 0.07872958216222173,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.19148936170212766,
						"acc_norm,none": 0.19148936170212766,
						"acc_norm_stderr,none": 0.05801446334976932,
						"acc_stderr,none": 0.05801446334976932,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.34545454545454546,
						"acc_norm,none": 0.34545454545454546,
						"acc_norm_stderr,none": 0.06470956516382613,
						"acc_stderr,none": 0.06470956516382613,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502246,
						"acc_stderr,none": 0.07401656182502246,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595453,
						"acc_stderr,none": 0.08534681648595453,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.0808104675899639,
						"acc_stderr,none": 0.0808104675899639,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628251,
						"acc_stderr,none": 0.05817221556628251,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.061487546190134544,
						"acc_stderr,none": 0.061487546190134544,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25004317043688495,
						"acc_norm,none": 0.25004317043688495,
						"acc_norm_stderr,none": 0.043083884282466484,
						"acc_stderr,none": 0.043083884282466484,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.1952662721893491,
						"acc_norm,none": 0.1952662721893491,
						"acc_norm_stderr,none": 0.030583351673923103,
						"acc_stderr,none": 0.030583351673923103,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.23648648648648649,
						"acc_norm,none": 0.23648648648648649,
						"acc_norm_stderr,none": 0.035047162412504336,
						"acc_stderr,none": 0.035047162412504336,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.033635910482728223,
						"acc_stderr,none": 0.033635910482728223,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.031234752377721164,
						"acc_stderr,none": 0.031234752377721164,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.02997599063670254,
						"acc_stderr,none": 0.02997599063670254,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.03311643267635493,
						"acc_stderr,none": 0.03311643267635493,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2366412213740458,
						"acc_norm,none": 0.2366412213740458,
						"acc_norm_stderr,none": 0.037276735755969195,
						"acc_stderr,none": 0.037276735755969195,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.20588235294117646,
						"acc_norm,none": 0.20588235294117646,
						"acc_norm_stderr,none": 0.03480046931235067,
						"acc_stderr,none": 0.03480046931235067,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.041099848424639984,
						"acc_stderr,none": 0.041099848424639984,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2848297213622291,
						"acc_norm,none": 0.2848297213622291,
						"acc_norm_stderr,none": 0.02515182168617952,
						"acc_stderr,none": 0.02515182168617952,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.22058823529411764,
						"acc_norm,none": 0.22058823529411764,
						"acc_norm_stderr,none": 0.0291022543896741,
						"acc_stderr,none": 0.0291022543896741,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2346368715083799,
						"acc_norm,none": 0.2346368715083799,
						"acc_norm_stderr,none": 0.03176302794175762,
						"acc_stderr,none": 0.03176302794175762,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.028304657943035286,
						"acc_stderr,none": 0.028304657943035286,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.044392639061996274,
						"acc_stderr,none": 0.044392639061996274,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.0430254877395901,
						"acc_stderr,none": 0.0430254877395901,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.04414343666854933,
						"acc_stderr,none": 0.04414343666854933,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.037431386312552786,
						"acc_stderr,none": 0.037431386312552786,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02582505450222104,
						"acc_stderr,none": 0.02582505450222104,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.02977177522814565,
						"acc_stderr,none": 0.02977177522814565,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.03377310252209193,
						"acc_stderr,none": 0.03377310252209193,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.034531515032766795,
						"acc_stderr,none": 0.034531515032766795,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27672955974842767,
						"acc_norm,none": 0.27672955974842767,
						"acc_norm_stderr,none": 0.03559177035707935,
						"acc_stderr,none": 0.03559177035707935,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2822085889570552,
						"acc_norm,none": 0.2822085889570552,
						"acc_norm_stderr,none": 0.03536117886664743,
						"acc_stderr,none": 0.03536117886664743,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.24206349206349206,
						"acc_norm,none": 0.24206349206349206,
						"acc_norm_stderr,none": 0.027036109679236982,
						"acc_stderr,none": 0.027036109679236982,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.0303137105381989,
						"acc_stderr,none": 0.0303137105381989,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.25630252100840334,
						"acc_norm,none": 0.25630252100840334,
						"acc_norm_stderr,none": 0.02835962087053395,
						"acc_stderr,none": 0.02835962087053395,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2217391304347826,
						"acc_norm,none": 0.2217391304347826,
						"acc_norm_stderr,none": 0.027451496604058923,
						"acc_stderr,none": 0.027451496604058923,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066652,
						"acc_stderr,none": 0.03785714465066652,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.034293230802398746,
						"acc_stderr,none": 0.034293230802398746,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.035829121651111746,
						"acc_stderr,none": 0.035829121651111746,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.21893491124260356,
						"acc_norm,none": 0.21893491124260356,
						"acc_norm_stderr,none": 0.03190409884491231,
						"acc_stderr,none": 0.03190409884491231,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2288135593220339,
						"acc_norm,none": 0.2288135593220339,
						"acc_norm_stderr,none": 0.03883538724538848,
						"acc_stderr,none": 0.03883538724538848,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940588,
						"acc_stderr,none": 0.04265792110940588,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2097902097902098,
						"acc_norm,none": 0.2097902097902098,
						"acc_norm_stderr,none": 0.03416800637471349,
						"acc_stderr,none": 0.03416800637471349,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.18253968253968253,
						"acc_norm,none": 0.18253968253968253,
						"acc_norm_stderr,none": 0.034550710191021475,
						"acc_stderr,none": 0.034550710191021475,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697948,
						"acc_stderr,none": 0.03162930395697948,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.22674418604651161,
						"acc_norm,none": 0.22674418604651161,
						"acc_norm_stderr,none": 0.03202075899584939,
						"acc_stderr,none": 0.03202075899584939,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2773722627737226,
						"acc_norm,none": 0.2773722627737226,
						"acc_norm_stderr,none": 0.02211041530412192,
						"acc_stderr,none": 0.02211041530412192,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.29439252336448596,
						"acc_norm,none": 0.29439252336448596,
						"acc_norm_stderr,none": 0.0312287911542499,
						"acc_stderr,none": 0.0312287911542499,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.30327868852459017,
						"acc_norm,none": 0.30327868852459017,
						"acc_norm_stderr,none": 0.041788598786318756,
						"acc_stderr,none": 0.041788598786318756,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.02716201711702204,
						"acc_stderr,none": 0.02716201711702204,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.22413793103448276,
						"acc_norm,none": 0.22413793103448276,
						"acc_norm_stderr,none": 0.03888669370117825,
						"acc_stderr,none": 0.03888669370117825,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.044729159560441434,
						"acc_stderr,none": 0.044729159560441434,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24571428571428572,
						"acc_norm,none": 0.24571428571428572,
						"acc_norm_stderr,none": 0.03263687142627841,
						"acc_stderr,none": 0.03263687142627841,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.030971033440870908,
						"acc_stderr,none": 0.030971033440870908,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2712765957446808,
						"acc_norm,none": 0.2712765957446808,
						"acc_norm_stderr,none": 0.02296000025237266,
						"acc_stderr,none": 0.02296000025237266,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23706896551724138,
						"acc_norm,none": 0.23706896551724138,
						"acc_norm_stderr,none": 0.02798169400862497,
						"acc_stderr,none": 0.02798169400862497,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.033291151121447815,
						"acc_stderr,none": 0.033291151121447815,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.23008849557522124,
						"acc_norm,none": 0.23008849557522124,
						"acc_norm_stderr,none": 0.02805928483916018,
						"acc_stderr,none": 0.02805928483916018,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23030303030303031,
						"acc_norm,none": 0.23030303030303031,
						"acc_norm_stderr,none": 0.0328766675860349,
						"acc_stderr,none": 0.0328766675860349,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.23243243243243245,
						"acc_norm,none": 0.23243243243243245,
						"acc_norm_stderr,none": 0.03113850517079465,
						"acc_stderr,none": 0.03113850517079465,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019636,
						"acc_stderr,none": 0.033341501981019636,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2360248447204969,
						"acc_norm,none": 0.2360248447204969,
						"acc_norm_stderr,none": 0.03357055232967969,
						"acc_stderr,none": 0.03357055232967969,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.003737743780434562,
						"mcc_stderr,none": 0.031103768987297463
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4772659511031603,
						"likelihood_diff_stderr,none": 0.49088835451046214,
						"pct_stereotype,none": 0.5608228980322003,
						"pct_stereotype_stderr,none": 0.09848087406351029
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.465638044126416,
						"likelihood_diff_stderr,none": 0.0817674558279069,
						"pct_stereotype,none": 0.6446034585569469,
						"pct_stereotype_stderr,none": 0.011691383517451213
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.79532967032967,
						"likelihood_diff_stderr,none": 0.3741520555832592,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.693181818181818,
						"likelihood_diff_stderr,none": 1.6696967978331319,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726127
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.996153846153846,
						"likelihood_diff_stderr,none": 0.5963188938712826,
						"pct_stereotype,none": 0.676923076923077,
						"pct_stereotype_stderr,none": 0.05845647751373334
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.61015625,
						"likelihood_diff_stderr,none": 0.1573761763903728,
						"pct_stereotype,none": 0.659375,
						"pct_stereotype_stderr,none": 0.026534392975531496
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.419560185185185,
						"likelihood_diff_stderr,none": 0.22476506938859697,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.828125,
						"likelihood_diff_stderr,none": 0.3062704344289238,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.176919291338583,
						"likelihood_diff_stderr,none": 0.13862152723353277,
						"pct_stereotype,none": 0.5393700787401575,
						"pct_stereotype_stderr,none": 0.022136834498576036
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.3355855855855854,
						"likelihood_diff_stderr,none": 0.2889364817644626,
						"pct_stereotype,none": 0.7477477477477478,
						"pct_stereotype_stderr,none": 0.04140938118194942
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.259408602150538,
						"likelihood_diff_stderr,none": 0.43254953377275895,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.128289473684211,
						"likelihood_diff_stderr,none": 0.23747047298192855,
						"pct_stereotype,none": 0.6631578947368421,
						"pct_stereotype_stderr,none": 0.03437880340748323
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.4888938580799045,
						"likelihood_diff_stderr,none": 0.08156240137076916,
						"pct_stereotype,none": 0.4770423375074538,
						"pct_stereotype_stderr,none": 0.01220041828317914
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.2333333333333334,
						"likelihood_diff_stderr,none": 0.34675236624373657,
						"pct_stereotype,none": 0.45555555555555555,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.9423076923076925,
						"likelihood_diff_stderr,none": 0.6800463350033433,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.412878787878788,
						"likelihood_diff_stderr,none": 0.4983507889678439,
						"pct_stereotype,none": 0.6212121212121212,
						"pct_stereotype_stderr,none": 0.0601674102524024
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.787772585669782,
						"likelihood_diff_stderr,none": 0.15454171718993162,
						"pct_stereotype,none": 0.4735202492211838,
						"pct_stereotype_stderr,none": 0.027911625198936637
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.394268774703558,
						"likelihood_diff_stderr,none": 0.2393255401900638,
						"pct_stereotype,none": 0.30434782608695654,
						"pct_stereotype_stderr,none": 0.028985507246376746
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.7291666666666665,
						"likelihood_diff_stderr,none": 0.42202395141887994,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.05913268547421809
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0375,
						"likelihood_diff_stderr,none": 0.14411452077441483,
						"pct_stereotype,none": 0.43043478260869567,
						"pct_stereotype_stderr,none": 0.023111017495849547
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.626086956521739,
						"likelihood_diff_stderr,none": 0.33040341695448183,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.04588314677411235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.024725274725275,
						"likelihood_diff_stderr,none": 0.30015906815281673,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.6160714285714284,
						"likelihood_diff_stderr,none": 0.24417182803609577,
						"pct_stereotype,none": 0.5561224489795918,
						"pct_stereotype_stderr,none": 0.035579471949536604
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.0037257257477226868
					},
					"glue": {
						"acc,none": 0.47397570271557893,
						"acc_stderr,none": 0.006104309039981269,
						"alias": "glue",
						"f1,none": 0.4450259440671591,
						"f1_stderr,none": 0.0011613802806875427,
						"mcc,none": 0.003737743780434562,
						"mcc_stderr,none": 0.031103768987297463
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.050037907505686124,
						"exact_match_stderr,get-answer": 0.006005442354577737
					},
					"hellaswag": {
						"acc,none": 0.5571599283011353,
						"acc_norm,none": 0.7550288787094205,
						"acc_norm_stderr,none": 0.004291911350430623,
						"acc_stderr,none": 0.004957068377516497,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.26988160554432566,
						"acc_norm,none": 0.26988160554432566,
						"acc_norm_stderr,none": 0.02356571648698782,
						"acc_stderr,none": 0.02356571648698782,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234185,
						"acc_stderr,none": 0.013807775152234185,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689504,
						"acc_stderr,none": 0.01382541652689504,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.27166666666666667,
						"acc_norm,none": 0.27166666666666667,
						"acc_norm_stderr,none": 0.018174809149686416,
						"acc_stderr,none": 0.018174809149686416,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.01460648312734276,
						"acc_stderr,none": 0.01460648312734276,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.01423652621529135,
						"acc_stderr,none": 0.01423652621529135,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481744,
						"acc_stderr,none": 0.014251810906481744,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.03128528159088722,
						"acc_stderr,none": 0.03128528159088722,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633918,
						"acc_stderr,none": 0.014046255632633918,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2230769230769231,
						"acc_norm,none": 0.2230769230769231,
						"acc_norm_stderr,none": 0.03665400868201044,
						"acc_stderr,none": 0.03665400868201044,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909282,
						"acc_stderr,none": 0.04292346959909282,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134717,
						"acc_stderr,none": 0.014470846741134717,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259588,
						"acc_stderr,none": 0.014111099288259588,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543152,
						"acc_stderr,none": 0.014512395033543152,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.01394627184944046,
						"acc_stderr,none": 0.01394627184944046,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177451,
						"acc_stderr,none": 0.013569640199177451,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.01414298497574067,
						"acc_stderr,none": 0.01414298497574067,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729494,
						"acc_stderr,none": 0.014013292702729494,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485242,
						"acc_stderr,none": 0.014174516461485242,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145153,
						"acc_stderr,none": 0.013979965645145153,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377932,
						"acc_stderr,none": 0.014370995982377932,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.01389503767796513,
						"acc_stderr,none": 0.01389503767796513,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.017366497958564646,
						"acc_stderr,none": 0.017366497958564646,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575018,
						"acc_stderr,none": 0.014442734941575018,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515433,
						"acc_stderr,none": 0.013531522534515433,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568198,
						"acc_stderr,none": 0.014029819522568198,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613597,
						"acc_stderr,none": 0.013294199326613597,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.026241737033622703,
						"acc_stderr,none": 0.026241737033622703,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577941,
						"acc_stderr,none": 0.013454070462577941,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.01377220656516854,
						"acc_stderr,none": 0.01377220656516854,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.031828687164775826,
						"acc_stderr,none": 0.031828687164775826,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.01389503767796512,
						"acc_stderr,none": 0.01389503767796512,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895036,
						"acc_stderr,none": 0.013825416526895036,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.03216633903375033,
						"acc_stderr,none": 0.03216633903375033,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481765,
						"acc_stderr,none": 0.014251810906481765,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48541986406489807,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.0366339172650086,
						"alias": "kobest",
						"f1,none": 0.39393512492621,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5035612535612536,
						"acc_stderr,none": 0.013348428901951027,
						"alias": " - kobest_boolq",
						"f1,none": 0.3554657321046679,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.015803979428161946,
						"alias": " - kobest_copa",
						"f1,none": 0.520896144717429,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.366,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.022318338119870523,
						"acc_stderr,none": 0.021564276850201618,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3618163879785442,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.47103274559193953,
						"acc_stderr,none": 0.025083743486632542,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4598989375485877,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6634969920434698,
						"acc_stderr,none": 0.013626250353494346,
						"alias": "lambada",
						"perplexity,none": 4.581309477921781,
						"perplexity_stderr,none": 0.24450204428335026
					},
					"lambada_cloze": {
						"acc,none": 0.09441102270522025,
						"acc_stderr,none": 0.008464918604058229,
						"alias": "lambada_cloze",
						"perplexity,none": 202.80427845102082,
						"perplexity_stderr,none": 6.42809109784278
					},
					"lambada_multilingual": {
						"acc,none": 0.41979429458567824,
						"acc_stderr,none": 0.0786758552445782,
						"alias": "lambada_multilingual",
						"perplexity,none": 69.87405628148183,
						"perplexity_stderr,none": 21.087915987921274
					},
					"lambada_openai": {
						"acc,none": 0.687172520861634,
						"acc_stderr,none": 0.006459477837059417,
						"alias": " - lambada_openai",
						"perplexity,none": 4.133552279354633,
						"perplexity_stderr,none": 0.08746793214630093
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.07956530176596158,
						"acc_stderr,none": 0.0037702523650452176,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 201.40675886932158,
						"perplexity_stderr,none": 6.518722997817119
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3081699980593829,
						"acc_stderr,none": 0.006432902165497003,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 99.92414618865641,
						"perplexity_stderr,none": 5.700808935368859
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6883368911313797,
						"acc_stderr,none": 0.006452905350982656,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.133057321175017,
						"perplexity_stderr,none": 0.0870658360677331
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.33300989714729284,
						"acc_stderr,none": 0.006565991832762937,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 99.06944427500518,
						"perplexity_stderr,none": 5.386476720774928
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.40520085387153115,
						"acc_stderr,none": 0.006839626982658154,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 57.56632329787498,
						"perplexity_stderr,none": 3.1300259314596053
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.36425383271880457,
						"acc_stderr,none": 0.006704339729528898,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 88.6773103246975,
						"perplexity_stderr,none": 5.191034069516995
					},
					"lambada_standard": {
						"acc,none": 0.6396274015136814,
						"acc_stderr,none": 0.006688850414338584,
						"alias": " - lambada_standard",
						"perplexity,none": 5.0271105467898005,
						"perplexity_stderr,none": 0.11190006251917957
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.10925674364447895,
						"acc_stderr,none": 0.004346227651722471,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 204.20179803272006,
						"perplexity_stderr,none": 6.259254030853473
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2684478371501272,
						"exact_match_stderr,get-answer": 0.011180584582096637
					},
					"logiqa": {
						"acc,none": 0.23348694316436253,
						"acc_norm,none": 0.27342549923195086,
						"acc_norm_stderr,none": 0.01748247454768128,
						"acc_stderr,none": 0.016593362460570887,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2506361323155216,
						"acc_norm,none": 0.2881679389312977,
						"acc_norm_stderr,none": 0.011426770634965258,
						"acc_stderr,none": 0.010934026494722665,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.2562814070351759,
						"acc_norm_stderr,none": 0.007992146938217008,
						"acc_stderr,none": 0.008029434758777935,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3644355009531879,
						"acc_stderr,none": 0.004953146288009649,
						"alias": "mc_taco",
						"f1,none": 0.5048271309513986,
						"f1_stderr,none": 0.005545744600310716
					},
					"medmcqa": {
						"acc,none": 0.24886445135070523,
						"acc_norm,none": 0.24886445135070523,
						"acc_norm_stderr,none": 0.006685726035149461,
						"acc_stderr,none": 0.006685726035149461,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24587588373919875,
						"acc_norm,none": 0.24587588373919875,
						"acc_norm_stderr,none": 0.012073573380009434,
						"acc_stderr,none": 0.012073573380009434,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2812277453354224,
						"acc_stderr,none": 0.04207303520666168,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.03785714465066652,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.34210526315789475,
						"acc_stderr,none": 0.03860731599316091,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.25660377358490566,
						"acc_stderr,none": 0.026880647889051992,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2847222222222222,
						"acc_stderr,none": 0.03773809990686935,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.035331333893236574,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.19607843137254902,
						"acc_stderr,none": 0.03950581861179961,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.027678452578212404,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748141,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003336,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.02293097307163335,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.40476190476190477,
						"acc_stderr,none": 0.04390259265377562,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2645161290322581,
						"acc_stderr,none": 0.02509189237885928,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.0319474007226554,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23636363636363636,
						"acc_stderr,none": 0.03317505930009179,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.03173071239071724,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.32124352331606215,
						"acc_stderr,none": 0.033699508685490674,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2948717948717949,
						"acc_stderr,none": 0.02311936275823228,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.0273091405882302,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.029597329730978082,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.036313298039696545,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26788990825688075,
						"acc_stderr,none": 0.01898746225797865,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2916666666666667,
						"acc_stderr,none": 0.030998666304560524,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.02977177522814565,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.23628691983122363,
						"acc_stderr,none": 0.027652153144159267,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2242152466367713,
						"acc_stderr,none": 0.027991534258519527,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.31297709923664124,
						"acc_stderr,none": 0.04066962905677698,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2741764080765144,
						"acc_stderr,none": 0.039627659898151486,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.3305785123966942,
						"acc_stderr,none": 0.04294340845212094,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.03893542518824847,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.03559039531617342,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.33035714285714285,
						"acc_stderr,none": 0.04464285714285715,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.26495726495726496,
						"acc_stderr,none": 0.028911208802749475,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.30395913154533843,
						"acc_stderr,none": 0.016448321686769043,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.28034682080924855,
						"acc_stderr,none": 0.024182427496577612,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2446927374301676,
						"acc_stderr,none": 0.014378169884098409,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3562091503267974,
						"acc_stderr,none": 0.02742047766262925,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.28644995172191834,
						"acc_stderr,none": 0.042803745332077855,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3183279742765273,
						"acc_stderr,none": 0.026457225067811035,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.02563082497562135,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2978723404255319,
						"acc_stderr,none": 0.027281608344469414,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.27053455019556716,
						"acc_stderr,none": 0.011345996743539265,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23897058823529413,
						"acc_stderr,none": 0.02590528064489301,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2679738562091503,
						"acc_stderr,none": 0.017917974069594722,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.21818181818181817,
						"acc_stderr,none": 0.03955932861795833,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3224489795918367,
						"acc_stderr,none": 0.029923100563683906,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.28501787455313626,
						"acc_stderr,none": 0.035416759074404516,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.27860696517412936,
						"acc_stderr,none": 0.031700561834973086,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28290516967967017,
						"acc_stderr,none": 0.04961588475618063,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.25301204819277107,
						"acc_stderr,none": 0.03384429155233135,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3391812865497076,
						"acc_stderr,none": 0.03631053496488904,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3293937850229241,
						"acc_stderr,none": 0.004744259768938668,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.33482506102522375,
						"acc_stderr,none": 0.004759683441650661,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.023048336668420193,
						"alias": "mrpc",
						"f1,none": 0.8122270742358079,
						"f1_stderr,none": 0.016275484057001473
					},
					"multimedqa": {
						"acc,none": 0.2828956706884315,
						"acc_norm,none": 0.24806941129661894,
						"acc_norm_stderr,none": 8.141668564682874e-05,
						"acc_stderr,none": 0.10602985794277944,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5693069306930693,
						"acc_stderr,none": 0.007112473596419731,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7011851015801354,
						"mrr_stderr,none": 0.010324117978090642,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.4153498871331828,
						"r@2_stderr,none": 0.016564694549772732
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.650018811136192,
						"mrr_stderr,none": 0.010430227876039155,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.4672686230248307,
						"r@2_stderr,none": 0.016771264669080587
					},
					"openbookqa": {
						"acc,none": 0.292,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.022109039310618552,
						"acc_stderr,none": 0.02035437548053007,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.011042665902539793,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.010957190790298967,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4115,
						"acc_stderr,none": 0.0110065638245373,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5225,
						"acc_stderr,none": 0.011171807357801175,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5515,
						"acc_stderr,none": 0.011123656901911276,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.537,
						"acc_stderr,none": 0.011152474561478175,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5185,
						"acc_stderr,none": 0.011175478542788575,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48028571428571426,
						"acc_stderr,none": 0.044735701540947824,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7878128400435256,
						"acc_norm,none": 0.7986942328618063,
						"acc_norm_stderr,none": 0.009355431098990435,
						"acc_stderr,none": 0.009539299828174048,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26296968403074295,
						"acc_norm,none": 0.3005444064901793,
						"acc_norm_stderr,none": 0.0033497126231959236,
						"acc_stderr,none": 0.003216389750486752,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.02070404102172473,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7279173015692755,
						"acc_norm,none": 0.5992547792399183,
						"acc_norm_stderr,none": 0.01044896121196604,
						"acc_stderr,none": 0.14377926620120451,
						"alias": "pythia",
						"bits_per_byte,none": 0.6487896109299826,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5678522506594048,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.133552279354633,
						"perplexity_stderr,none": 0.08746793214630093,
						"word_perplexity,none": 11.076041195986809,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4962474830679114,
						"acc_stderr,none": 0.006765220016415221,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5348750927529062,
						"acc_stderr,none": 0.0024806442473283203,
						"alias": "qqp",
						"f1,none": 0.4419053271998813,
						"f1_stderr,none": 0.0033851537428577754
					},
					"race": {
						"acc,none": 0.384688995215311,
						"acc_stderr,none": 0.015057468843874154,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.927,
						"acc_norm,none": 0.885,
						"acc_norm_stderr,none": 0.010093407594904614,
						"acc_stderr,none": 0.008230354715244059,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5756880733944955,
						"acc_stderr,none": 0.016746619706066005,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5512346296111167,
						"acc_norm,none": 0.7490252924122763,
						"acc_norm_stderr,none": 0.003065447919018033,
						"acc_stderr,none": 0.003516483928816561,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.57458986389804,
						"acc_stderr,none": 0.028184359004039327,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.610176282051282,
						"acc_stderr,none": 0.004881252293013471,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6110266545049153,
						"acc_stderr,none": 0.004908168268304811,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5045098039215686,
						"acc_stderr,none": 0.004950779022493218,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.30152155175145756,
						"acc_stderr,none": 0.00101315922710758,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713647,
						"bleu_diff,none": -9.095219004961898,
						"bleu_diff_stderr,none": 0.7804871290932788,
						"bleu_max,none": 26.594375993138968,
						"bleu_max_stderr,none": 0.7778671069690596,
						"rouge1_acc,none": 0.31701346389228885,
						"rouge1_acc_stderr,none": 0.016289203374403358,
						"rouge1_diff,none": -11.175921692345224,
						"rouge1_diff_stderr,none": 0.8195761806099192,
						"rouge1_max,none": 52.11636563362932,
						"rouge1_max_stderr,none": 0.829198117947217,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766373,
						"rouge2_diff,none": -13.49748922413662,
						"rouge2_diff_stderr,none": 1.0086880301697758,
						"rouge2_max,none": 36.46886632756582,
						"rouge2_max_stderr,none": 0.9690264435785303,
						"rougeL_acc,none": 0.3023255813953488,
						"rougeL_acc_stderr,none": 0.016077509266133033,
						"rougeL_diff,none": -11.330097263561317,
						"rougeL_diff_stderr,none": 0.8162709159821883,
						"rougeL_max,none": 49.2588863909637,
						"rougeL_max_stderr,none": 0.8341363234633794
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2998776009791922,
						"bleu_acc_stderr,none": 0.016040352966713647,
						"bleu_diff,none": -9.095219004961898,
						"bleu_diff_stderr,none": 0.7804871290932788,
						"bleu_max,none": 26.594375993138968,
						"bleu_max_stderr,none": 0.7778671069690596,
						"rouge1_acc,none": 0.31701346389228885,
						"rouge1_acc_stderr,none": 0.016289203374403358,
						"rouge1_diff,none": -11.175921692345224,
						"rouge1_diff_stderr,none": 0.8195761806099192,
						"rouge1_max,none": 52.11636563362932,
						"rouge1_max_stderr,none": 0.829198117947217,
						"rouge2_acc,none": 0.2607099143206854,
						"rouge2_acc_stderr,none": 0.015368841620766373,
						"rouge2_diff,none": -13.49748922413662,
						"rouge2_diff_stderr,none": 1.0086880301697758,
						"rouge2_max,none": 36.46886632756582,
						"rouge2_max_stderr,none": 0.9690264435785303,
						"rougeL_acc,none": 0.3023255813953488,
						"rougeL_acc_stderr,none": 0.016077509266133033,
						"rougeL_diff,none": -11.330097263561317,
						"rougeL_diff_stderr,none": 0.8162709159821883,
						"rougeL_max,none": 49.2588863909637,
						"rougeL_max_stderr,none": 0.8341363234633794
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24479804161566707,
						"acc_stderr,none": 0.015051869486714997,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35824506188724803,
						"acc_stderr,none": 0.013793166886968536,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.029035433070866142,
						"exact_match_stderr,none": 0.0037257257477226868
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6487896109299826,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5678522506594048,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.076041195986809,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6724546172059984,
						"acc_stderr,none": 0.013190169546797016,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5774647887323944,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.3942307692307692,
						"acc_stderr,none": 0.04815154775990711,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8461538461538461,
						"acc_stderr,none": 0.021876786884404677,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5378181818181818,
						"acc_stderr,none": 0.03414712992987977,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143022,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.022318338119870527,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143008,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.39435073627844713,
						"acc_stderr,none": 0.05742628184405443,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3859437751004016,
						"acc_stderr,none": 0.009757838842063344,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4602409638554217,
						"acc_stderr,none": 0.009990337216722657,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3514056224899598,
						"acc_stderr,none": 0.009569263079823961,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5642570281124498,
						"acc_stderr,none": 0.009938966706641357,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4610441767068273,
						"acc_stderr,none": 0.009991608448389063,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4895582329317269,
						"acc_stderr,none": 0.010019887205677435,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.35943775100401604,
						"acc_stderr,none": 0.009617895762902744,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.00988527772784018,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201522,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3405622489959839,
						"acc_stderr,none": 0.00949888669027444,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206103,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3413654618473896,
						"acc_stderr,none": 0.00950428807888022,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.36586345381526103,
						"acc_stderr,none": 0.00965469276557259,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3534136546184739,
						"acc_stderr,none": 0.009581698005070973,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5533361410264123,
						"acc_stderr,none": 0.07308704147277036,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.46657842488418266,
						"acc_stderr,none": 0.012838347934731665,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7743216412971542,
						"acc_stderr,none": 0.01075764435168656,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6115155526141628,
						"acc_stderr,none": 0.012543019523160325,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163341,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5056254136333554,
						"acc_stderr,none": 0.01286631092307251,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126671,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.012864417047980477,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5519523494374586,
						"acc_stderr,none": 0.012797478885304733,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.514228987425546,
						"acc_stderr,none": 0.012861913999596129,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5459960291197882,
						"acc_stderr,none": 0.012812565368728933,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5757776307081403,
						"acc_stderr,none": 0.012718494399531067,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7473589570690042,
						"acc_stderr,none": 0.0647846885953495,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.864516129032258,
						"acc_stderr,none": 0.007099246998788207,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6265060240963856,
						"acc_stderr,none": 0.05341921480681956,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5766423357664233,
						"acc_stderr,none": 0.015963356799273146,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6159695817490495,
						"acc_stderr,none": 0.03004773912243715,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.02764654065504541,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7123015873015873,
						"acc_stderr,none": 0.020184439611834477,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"dtype=float16,trust_remote_code=True": {
				"confObj": {
					"dtype": "float16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=float16,trust_remote_code=True",
				"groups": {
					"mmlu": {
						"acc,none": 0.2847884916678536,
						"acc_stderr,none": 0.05083377611198537,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2595111583421892,
						"acc_stderr,none": 0.030882438559226597,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3080141615706469,
						"acc_stderr,none": 0.05726215420355078,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2908677283067923,
						"acc_stderr,none": 0.054129411827228885,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.29368855058674276,
						"acc_stderr,none": 0.057050203747814526,
						"alias": " - stem"
					}
				},
				"results": {
					"arc_challenge": {
						"acc,none": 0.41723549488054607,
						"acc_norm,none": 0.4462457337883959,
						"acc_norm_stderr,none": 0.014526705548539978,
						"acc_stderr,none": 0.014409825518403077,
						"alias": "arc_challenge"
					},
					"hellaswag": {
						"acc,none": 0.5707030472017527,
						"acc_norm,none": 0.7716590320653256,
						"acc_norm_stderr,none": 0.004189055914281299,
						"acc_stderr,none": 0.004939642460172579,
						"alias": "hellaswag"
					},
					"mmlu": {
						"acc,none": 0.2847884916678536,
						"acc_stderr,none": 0.05083377611198537,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.038850042458002526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.27169811320754716,
						"acc_stderr,none": 0.027377706624670713,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.24305555555555555,
						"acc_stderr,none": 0.03586879280080342,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.0326926380614177,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.040233822736177476,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2851063829787234,
						"acc_stderr,none": 0.029513196625539355,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.043036840335373173,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.022418042891113946,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.19047619047619047,
						"acc_stderr,none": 0.03512207412302052,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.34838709677419355,
						"acc_stderr,none": 0.027104826328100944,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3251231527093596,
						"acc_stderr,none": 0.03295797566311271,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.040201512610368445,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2545454545454545,
						"acc_stderr,none": 0.03401506715249039,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23232323232323232,
						"acc_stderr,none": 0.030088629490217487,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.3316062176165803,
						"acc_stderr,none": 0.03397636541089116,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3435897435897436,
						"acc_stderr,none": 0.02407869658063547,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.027195934804085622,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31932773109243695,
						"acc_stderr,none": 0.030283995525884396,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.037804458505267334,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24954128440366974,
						"acc_stderr,none": 0.018553897629501624,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.44907407407407407,
						"acc_stderr,none": 0.03392238405321617,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.02910225438967407,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2742616033755274,
						"acc_stderr,none": 0.029041333510598035,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3273542600896861,
						"acc_stderr,none": 0.03149384670994131,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.26717557251908397,
						"acc_stderr,none": 0.038808483010823944,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2595111583421892,
						"acc_stderr,none": 0.030882438559226597,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2644628099173554,
						"acc_stderr,none": 0.04026187527591205,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.03957835471980979,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2392638036809816,
						"acc_stderr,none": 0.0335195387952127,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.04327040932578728,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729245,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.02948036054954119,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.01632881442210205,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2630057803468208,
						"acc_stderr,none": 0.023703099525258165,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217889,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.34967320261437906,
						"acc_stderr,none": 0.027305308076274695,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3080141615706469,
						"acc_stderr,none": 0.05726215420355078,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3247588424437299,
						"acc_stderr,none": 0.026596782287697046,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3117283950617284,
						"acc_stderr,none": 0.025773111169630436,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.22695035460992907,
						"acc_stderr,none": 0.024987106365642973,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.258148631029987,
						"acc_stderr,none": 0.011176923719313402,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4485294117647059,
						"acc_stderr,none": 0.0302114796091216,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2238562091503268,
						"acc_stderr,none": 0.016863008585416613,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.32727272727272727,
						"acc_stderr,none": 0.04494290866252089,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4122448979591837,
						"acc_stderr,none": 0.03151236044674281,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2908677283067923,
						"acc_stderr,none": 0.054129411827228885,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3482587064676617,
						"acc_stderr,none": 0.033687874661154596,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.29368855058674276,
						"acc_stderr,none": 0.057050203747814526,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.4036144578313253,
						"acc_stderr,none": 0.03819486140758398,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.03377310252209194,
						"alias": "  - world_religions"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35846563712923685,
						"acc_stderr,none": 0.01380221224203312,
						"alias": "truthfulqa_mc2"
					},
					"winogrande": {
						"acc,none": 0.7048145224940805,
						"acc_stderr,none": 0.012819410741754775,
						"alias": "winogrande"
					}
				}
			}
		},
		"name": "allenai/OLMo-7B"
	},
	"bigscience/bloom-7b1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5326944757609922,
						"acc_norm,none": 0.49492671927846665,
						"acc_norm_stderr,none": 0.038208130956824383,
						"acc_stderr,none": 0.05466218984844714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.01446949103500938,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.008949999999999998,
						"acc_stderr,none": 0.008209021742641017,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8393432835820895,
						"acc_stderr,none": 0.14749702713952673,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2600297176820208,
						"acc_norm,none": 0.2600297176820208,
						"acc_norm_stderr,none": 0.11835357984205985,
						"acc_stderr,none": 0.11835357984205985,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25461923674667586,
						"acc_norm,none": 0.25461923674667586,
						"acc_norm_stderr,none": 0.04055574314806645,
						"acc_stderr,none": 0.04055574314806645,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.232390429338104,
						"likelihood_diff_stderr,none": 0.45353783891613814,
						"pct_stereotype,none": 0.6106141920095408,
						"pct_stereotype_stderr,none": 0.06140619073838575
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.021161417322834646,
						"exact_match_stderr,none": 0.0031935443667984284
					},
					"glue": {
						"acc,none": 0.4109753326013436,
						"acc_stderr,none": 0.04395447277126908,
						"alias": "glue",
						"f1,none": 0.45530129840392874,
						"f1_stderr,none": 0.0012303593027047444,
						"mcc,none": -0.028771717657034023,
						"mcc_stderr,none": 0.0008834063675970467
					},
					"kmmlu": {
						"acc,none": 0.09696794686687843,
						"acc_norm,none": 0.09696794686687843,
						"acc_norm_stderr,none": 0.06545838667116215,
						"acc_stderr,none": 0.06545838667116215,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4764306073229555,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.039805756990886364,
						"alias": "kobest",
						"f1,none": 0.4021326241581926,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5747137589753541,
						"acc_stderr,none": 0.007010810636420737,
						"alias": "lambada",
						"perplexity,none": 6.958047049706398,
						"perplexity_stderr,none": 0.2647536497791857
					},
					"lambada_cloze": {
						"acc,none": 0.1674752571317679,
						"acc_stderr,none": 0.011609439689518901,
						"alias": "lambada_cloze",
						"perplexity,none": 185.66676218079564,
						"perplexity_stderr,none": 23.734859625696718
					},
					"lambada_multilingual": {
						"acc,none": 0.38490199883562976,
						"acc_stderr,none": 0.07608898792977997,
						"alias": "lambada_multilingual",
						"perplexity,none": 131.45396740665825,
						"perplexity_stderr,none": 95.28024178884175
					},
					"mmlu": {
						"acc,none": 0.2624270047001852,
						"acc_stderr,none": 0.040309744416101086,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2590860786397449,
						"acc_stderr,none": 0.04181966524292461,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25941422594142255,
						"acc_stderr,none": 0.03678390469844913,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2713682157946051,
						"acc_stderr,none": 0.03672232511502534,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2616555661274976,
						"acc_stderr,none": 0.04402844485531509,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.27977288857345634,
						"acc_norm,none": 0.2453669891865909,
						"acc_norm_stderr,none": 8.89879687730782e-05,
						"acc_stderr,none": 0.09463051472444227,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5078571428571429,
						"acc_stderr,none": 0.03988534011535243,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7187471051029983,
						"acc_norm,none": 0.5022687591886176,
						"acc_norm_stderr,none": 0.004176892732703178,
						"acc_stderr,none": 0.14816415249964396,
						"alias": "pythia",
						"bits_per_byte,none": 0.756782382110838,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.689717870485216,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.581986537574138,
						"perplexity_stderr,none": 0.17477792177799384,
						"word_perplexity,none": 16.52818534984857,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.35106382978723405,
						"acc_norm,none": 0.3971631205673759,
						"acc_norm_stderr,none": 0.04753935337666359,
						"acc_stderr,none": 0.040881997002059974,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5447406076336895,
						"acc_stderr,none": 0.02856906246199001,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.33411149075668534,
						"acc_stderr,none": 0.052722485787617,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.19583843329253367,
						"bleu_acc_stderr,none": 0.00019299723203193512,
						"bleu_diff,none": -1.8214976706312949,
						"bleu_diff_stderr,none": 0.13865372230348294,
						"bleu_max,none": 6.433425824915947,
						"bleu_max_stderr,none": 0.16590804599147632,
						"rouge1_acc,none": 0.20563035495716034,
						"rouge1_acc_stderr,none": 0.00020017954911440305,
						"rouge1_diff,none": -3.2308274257746885,
						"rouge1_diff_stderr,none": 0.25517163532724385,
						"rouge1_max,none": 17.045475467984797,
						"rouge1_max_stderr,none": 0.6024886029445912,
						"rouge2_acc,none": 0.09179926560587515,
						"rouge2_acc_stderr,none": 0.00010217176524521813,
						"rouge2_diff,none": -3.6813286748262195,
						"rouge2_diff_stderr,none": 0.32889411116487677,
						"rouge2_max,none": 9.423257042100992,
						"rouge2_max_stderr,none": 0.4018387405742677,
						"rougeL_acc,none": 0.21052631578947367,
						"rougeL_acc_stderr,none": 0.00020368258106566656,
						"rougeL_diff,none": -3.154937419393459,
						"rougeL_diff_stderr,none": 0.25808985785927113,
						"rougeL_max,none": 16.178992153016562,
						"rougeL_max_stderr,none": 0.5560450337526428
					},
					"xcopa": {
						"acc,none": 0.5709090909090908,
						"acc_stderr,none": 0.06135942275478038,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.41204819277108434,
						"acc_stderr,none": 0.051535476594892576,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5927441188857469,
						"acc_stderr,none": 0.05262352730974911,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7442121825129242,
						"acc_stderr,none": 0.06414679137553342,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5326944757609922,
						"acc_norm,none": 0.49492671927846665,
						"acc_norm_stderr,none": 0.038208130956824383,
						"acc_stderr,none": 0.05466218984844714,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.01446949103500938,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.014922019523732975,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932573,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.33666666666666667,
						"acc_stderr,none": 0.013647602942406398,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.30204778156996587,
						"acc_norm,none": 0.3378839590443686,
						"acc_norm_stderr,none": 0.013822047922283512,
						"acc_stderr,none": 0.01341751914471642,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6464646464646465,
						"acc_norm,none": 0.5723905723905723,
						"acc_norm_stderr,none": 0.010151683397430677,
						"acc_stderr,none": 0.009809728948151495,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.008949999999999998,
						"acc_stderr,none": 0.008209021742641017,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0225,
						"acc_stderr,none": 0.00331698299484552,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0185,
						"acc_stderr,none": 0.0030138707185866863,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.032,
						"acc_stderr,none": 0.0039364638794147895,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0135,
						"acc_stderr,none": 0.0025811249685073444,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430694982,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000116,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"blimp": {
						"acc,none": 0.8393432835820895,
						"acc_stderr,none": 0.14749702713952673,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448825,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099825,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386684,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653866,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.01364467578131412,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.015712507211864214,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.012207580637662162,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852428,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767667,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496115,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118759,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099204,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897893,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832011,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275285,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487912,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.01228519132638668,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849876,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704171,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.01001655286669684,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503004,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.495,
						"acc_stderr,none": 0.015818508944436652,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220067,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206491,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719106,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426657,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747394,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996685,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.015431725053866608,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369678,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.01581119837311488,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737235,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702293,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410043,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.013084731950262024,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847164,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702284,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817143,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454272,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091493,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.015060472031706624,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524408,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011858,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611479,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.015819015179246724,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639227,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337077,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091512,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0109781838443578,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.01132816522334168,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.011598902298689004,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696846,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897896,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151087,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513065165,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695458,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.414,
						"acc_stderr,none": 0.015583544104177522,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.015120172605483694,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6302752293577981,
						"acc_stderr,none": 0.008443002801337146,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.06672848092813058,
						"alias": "cb",
						"f1,none": 0.21956970232832304,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2600297176820208,
						"acc_norm,none": 0.2600297176820208,
						"acc_norm_stderr,none": 0.11835357984205985,
						"acc_stderr,none": 0.11835357984205985,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.050819726761358854,
						"acc_stderr,none": 0.050819726761358854,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828826,
						"acc_stderr,none": 0.062426763436828826,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518174,
						"acc_stderr,none": 0.05443310539518174,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.09523809523809523,
						"acc_norm,none": 0.09523809523809523,
						"acc_norm_stderr,none": 0.06563832739090583,
						"acc_stderr,none": 0.06563832739090583,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857374,
						"acc_stderr,none": 0.08982552969857374,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777507,
						"acc_stderr,none": 0.06120537406777507,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031763,
						"acc_stderr,none": 0.07633651333031763,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.05,
						"acc_norm,none": 0.05,
						"acc_norm_stderr,none": 0.04999999999999998,
						"acc_stderr,none": 0.04999999999999998,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755133,
						"acc_stderr,none": 0.08780518530755133,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.5217391304347826,
						"acc_norm,none": 0.5217391304347826,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.0914486154730632,
						"acc_stderr,none": 0.0914486154730632,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.07655305550699536,
						"acc_stderr,none": 0.07655305550699536,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3469387755102041,
						"acc_norm,none": 0.3469387755102041,
						"acc_norm_stderr,none": 0.06870411522695291,
						"acc_stderr,none": 0.06870411522695291,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.06358669845936323,
						"acc_stderr,none": 0.06358669845936323,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25461923674667586,
						"acc_norm,none": 0.25461923674667586,
						"acc_norm_stderr,none": 0.04055574314806645,
						"acc_stderr,none": 0.04055574314806645,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.27702702702702703,
						"acc_norm,none": 0.27702702702702703,
						"acc_norm_stderr,none": 0.036911647897386525,
						"acc_stderr,none": 0.036911647897386525,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070896,
						"acc_stderr,none": 0.03541088558070896,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.030880282749398028,
						"acc_stderr,none": 0.030880282749398028,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2366412213740458,
						"acc_norm,none": 0.2366412213740458,
						"acc_norm_stderr,none": 0.03727673575596919,
						"acc_stderr,none": 0.03727673575596919,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.041657429989652724,
						"acc_stderr,none": 0.041657429989652724,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.023638766707521772,
						"acc_stderr,none": 0.023638766707521772,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.23039215686274508,
						"acc_norm,none": 0.23039215686274508,
						"acc_norm_stderr,none": 0.02955429260569508,
						"acc_stderr,none": 0.02955429260569508,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2681564245810056,
						"acc_norm,none": 0.2681564245810056,
						"acc_norm_stderr,none": 0.03320421630673713,
						"acc_stderr,none": 0.03320421630673713,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3113207547169811,
						"acc_norm,none": 0.3113207547169811,
						"acc_norm_stderr,none": 0.0451874553177075,
						"acc_stderr,none": 0.0451874553177075,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.14953271028037382,
						"acc_norm,none": 0.14953271028037382,
						"acc_norm_stderr,none": 0.034637295696622716,
						"acc_stderr,none": 0.034637295696622716,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439377,
						"acc_stderr,none": 0.04396093377439377,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.32407407407407407,
						"acc_norm,none": 0.32407407407407407,
						"acc_norm_stderr,none": 0.04524596007030048,
						"acc_stderr,none": 0.04524596007030048,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.04513676718168311,
						"acc_stderr,none": 0.04513676718168311,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.03889722288318549,
						"acc_stderr,none": 0.03889722288318549,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.21978021978021978,
						"acc_norm,none": 0.21978021978021978,
						"acc_norm_stderr,none": 0.025108358900325773,
						"acc_stderr,none": 0.025108358900325773,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693268,
						"acc_stderr,none": 0.030778554678693268,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606755,
						"acc_stderr,none": 0.03558926157606755,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.20754716981132076,
						"acc_norm,none": 0.20754716981132076,
						"acc_norm_stderr,none": 0.03226387858712916,
						"acc_stderr,none": 0.03226387858712916,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.24539877300613497,
						"acc_norm,none": 0.24539877300613497,
						"acc_norm_stderr,none": 0.03380939813943354,
						"acc_stderr,none": 0.03380939813943354,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.026569137736133553,
						"acc_stderr,none": 0.026569137736133553,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03115626951964684,
						"acc_stderr,none": 0.03115626951964684,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.23949579831932774,
						"acc_norm,none": 0.23949579831932774,
						"acc_norm_stderr,none": 0.02772206549336127,
						"acc_stderr,none": 0.02772206549336127,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.27391304347826084,
						"acc_norm,none": 0.27391304347826084,
						"acc_norm_stderr,none": 0.029470189815005897,
						"acc_stderr,none": 0.029470189815005897,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03820169914517905,
						"acc_stderr,none": 0.03820169914517905,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2684563758389262,
						"acc_norm,none": 0.2684563758389262,
						"acc_norm_stderr,none": 0.036427227538629016,
						"acc_stderr,none": 0.036427227538629016,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2196969696969697,
						"acc_norm,none": 0.2196969696969697,
						"acc_norm_stderr,none": 0.036174957725402315,
						"acc_stderr,none": 0.036174957725402315,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.035637888362588285,
						"acc_stderr,none": 0.035637888362588285,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.040139645540727756,
						"acc_stderr,none": 0.040139645540727756,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03932537680392871,
						"acc_stderr,none": 0.03932537680392871,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2895377128953771,
						"acc_norm,none": 0.2895377128953771,
						"acc_norm_stderr,none": 0.022399130302514097,
						"acc_stderr,none": 0.022399130302514097,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2757009345794392,
						"acc_norm,none": 0.2757009345794392,
						"acc_norm_stderr,none": 0.030618808026055624,
						"acc_stderr,none": 0.030618808026055624,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03747420876084759,
						"acc_stderr,none": 0.03747420876084759,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.26229508196721313,
						"acc_norm,none": 0.26229508196721313,
						"acc_norm_stderr,none": 0.03998929318926594,
						"acc_stderr,none": 0.03998929318926594,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467189,
						"acc_stderr,none": 0.02985642316467189,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736877,
						"acc_stderr,none": 0.03305282343736877,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24338624338624337,
						"acc_norm,none": 0.24338624338624337,
						"acc_norm_stderr,none": 0.03129725192855851,
						"acc_stderr,none": 0.03129725192855851,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.22413793103448276,
						"acc_norm,none": 0.22413793103448276,
						"acc_norm_stderr,none": 0.03888669370117824,
						"acc_stderr,none": 0.03888669370117824,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.041175810978450994,
						"acc_stderr,none": 0.041175810978450994,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26285714285714284,
						"acc_norm,none": 0.26285714285714284,
						"acc_norm_stderr,none": 0.03337037585221274,
						"acc_stderr,none": 0.03337037585221274,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.23222748815165878,
						"acc_norm,none": 0.23222748815165878,
						"acc_norm_stderr,none": 0.02913824862358175,
						"acc_stderr,none": 0.02913824862358175,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.27155172413793105,
						"acc_norm,none": 0.27155172413793105,
						"acc_norm_stderr,none": 0.029263054233931905,
						"acc_stderr,none": 0.029263054233931905,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.22988505747126436,
						"acc_norm,none": 0.22988505747126436,
						"acc_norm_stderr,none": 0.03198969467577206,
						"acc_stderr,none": 0.03198969467577206,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24336283185840707,
						"acc_norm,none": 0.24336283185840707,
						"acc_norm_stderr,none": 0.0286074865048577,
						"acc_stderr,none": 0.0286074865048577,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.035014387062967806,
						"acc_stderr,none": 0.035014387062967806,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.03209281645145385,
						"acc_stderr,none": 0.03209281645145385,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.03416149068322981,
						"acc_stderr,none": 0.03416149068322981,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.23125,
						"acc_norm_stderr,none": 0.03343758265727744,
						"acc_stderr,none": 0.03343758265727744,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.028771717657034023,
						"mcc_stderr,none": 0.0297221528089243
					},
					"copa": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.232390429338104,
						"likelihood_diff_stderr,none": 0.45353783891613814,
						"pct_stereotype,none": 0.6106141920095408,
						"pct_stereotype_stderr,none": 0.06140619073838575
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4746571258199164,
						"likelihood_diff_stderr,none": 0.08578932615071705,
						"pct_stereotype,none": 0.6129994036970781,
						"pct_stereotype_stderr,none": 0.011897311592496126
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.4368131868131866,
						"likelihood_diff_stderr,none": 0.33765159410512313,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.795454545454546,
						"likelihood_diff_stderr,none": 1.8774433116421254,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.088461538461538,
						"likelihood_diff_stderr,none": 0.6808778705143921,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.05685286730420954
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.803515625,
						"likelihood_diff_stderr,none": 0.18292499522630842,
						"pct_stereotype,none": 0.65,
						"pct_stereotype_stderr,none": 0.02670517073902783
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3003472222222223,
						"likelihood_diff_stderr,none": 0.23004292651499866,
						"pct_stereotype,none": 0.5046296296296297,
						"pct_stereotype_stderr,none": 0.03409825519163572
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7378472222222223,
						"likelihood_diff_stderr,none": 0.3139165831078793,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.2837106299212597,
						"likelihood_diff_stderr,none": 0.14416009885120143,
						"pct_stereotype,none": 0.5334645669291339,
						"pct_stereotype_stderr,none": 0.022155988267174086
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.3378378378378377,
						"likelihood_diff_stderr,none": 0.32741571566830974,
						"pct_stereotype,none": 0.6756756756756757,
						"pct_stereotype_stderr,none": 0.04463366615377136
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.116935483870968,
						"likelihood_diff_stderr,none": 0.42903768446444784,
						"pct_stereotype,none": 0.7204301075268817,
						"pct_stereotype_stderr,none": 0.046789371667506734
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.9322368421052634,
						"likelihood_diff_stderr,none": 0.23499709226011173,
						"pct_stereotype,none": 0.6894736842105263,
						"pct_stereotype_stderr,none": 0.03365713545671698
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 2.987887596899225,
						"likelihood_diff_stderr,none": 0.07105551999160453,
						"pct_stereotype,none": 0.6064400715563506,
						"pct_stereotype_stderr,none": 0.011933349890055877
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 2.7708333333333335,
						"likelihood_diff_stderr,none": 0.27052344703049014,
						"pct_stereotype,none": 0.5333333333333333,
						"pct_stereotype_stderr,none": 0.05288198530254015
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 1.7692307692307692,
						"likelihood_diff_stderr,none": 0.4446184067415701,
						"pct_stereotype,none": 0.8461538461538461,
						"pct_stereotype_stderr,none": 0.10415433852097383
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.340909090909091,
						"likelihood_diff_stderr,none": 0.4061243122276867,
						"pct_stereotype,none": 0.7121212121212122,
						"pct_stereotype_stderr,none": 0.05615974350262317
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.6148753894080996,
						"likelihood_diff_stderr,none": 0.1372423809735031,
						"pct_stereotype,none": 0.6292834890965732,
						"pct_stereotype_stderr,none": 0.027000334456667868
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.218873517786561,
						"likelihood_diff_stderr,none": 0.18243756076848436,
						"pct_stereotype,none": 0.4426877470355731,
						"pct_stereotype_stderr,none": 0.031289438964526774
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4878472222222223,
						"likelihood_diff_stderr,none": 0.48905493987630755,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.05533504751887218
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.6902173913043477,
						"likelihood_diff_stderr,none": 0.12661490653357885,
						"pct_stereotype,none": 0.5608695652173913,
						"pct_stereotype_stderr,none": 0.02316441640598207
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 2.9402173913043477,
						"likelihood_diff_stderr,none": 0.23687590039198891,
						"pct_stereotype,none": 0.7478260869565218,
						"pct_stereotype_stderr,none": 0.04067222754154718
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 2.9862637362637363,
						"likelihood_diff_stderr,none": 0.2861957499435561,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.042843052065094325
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5870535714285716,
						"likelihood_diff_stderr,none": 0.2542396166949862,
						"pct_stereotype,none": 0.6836734693877551,
						"pct_stereotype_stderr,none": 0.03330234893102004
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.021161417322834646,
						"exact_match_stderr,none": 0.0031935443667984284
					},
					"glue": {
						"acc,none": 0.4109753326013436,
						"acc_stderr,none": 0.04395447277126908,
						"alias": "glue",
						"f1,none": 0.45530129840392874,
						"f1_stderr,none": 0.0012303593027047444,
						"mcc,none": -0.028771717657034023,
						"mcc_stderr,none": 0.0008834063675970467
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.013646702047005308,
						"exact_match_stderr,get-answer": 0.003195747075480839
					},
					"hellaswag": {
						"acc,none": 0.4645488946425015,
						"acc_norm,none": 0.6227843059151563,
						"acc_norm_stderr,none": 0.004836990373261561,
						"acc_stderr,none": 0.004977223485342033,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09696794686687843,
						"acc_norm,none": 0.09696794686687843,
						"acc_norm_stderr,none": 0.06545838667116215,
						"acc_stderr,none": 0.06545838667116215,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.008963053962592081,
						"acc_stderr,none": 0.008963053962592081,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333378,
						"acc_stderr,none": 0.008333333333333378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370098,
						"acc_stderr,none": 0.012486268734370098,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319325,
						"acc_stderr,none": 0.012535235623319325,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.01552503498177411,
						"acc_stderr,none": 0.01552503498177411,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.007,
						"acc_norm,none": 0.007,
						"acc_norm_stderr,none": 0.0026377941462437785,
						"acc_stderr,none": 0.0026377941462437785,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.002,
						"acc_norm,none": 0.002,
						"acc_norm_stderr,none": 0.0014135055705578176,
						"acc_stderr,none": 0.0014135055705578176,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910625,
						"acc_stderr,none": 0.004319451082910625,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452374,
						"acc_stderr,none": 0.005651808820452374,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611468,
						"acc_stderr,none": 0.004206387249611468,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.015,
						"acc_norm,none": 0.015,
						"acc_norm_stderr,none": 0.003845749574502999,
						"acc_stderr,none": 0.003845749574502999,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611491,
						"acc_stderr,none": 0.004206387249611491,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.009899393819724432,
						"acc_stderr,none": 0.009899393819724432,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753655,
						"acc_stderr,none": 0.008583336977753655,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323485,
						"acc_stderr,none": 0.008072494358323485,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.0042063872496114615,
						"acc_stderr,none": 0.0042063872496114615,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.028,
						"acc_norm,none": 0.028,
						"acc_norm_stderr,none": 0.005219506034410047,
						"acc_stderr,none": 0.005219506034410047,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.008072494358323494,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412808,
						"acc_stderr,none": 0.012310790208412808,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.014177505755565045,
						"acc_stderr,none": 0.014177505755565045,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024952,
						"acc_stderr,none": 0.009698921026024952,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706827,
						"acc_stderr,none": 0.007335175853706827,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.078,
						"acc_norm,none": 0.078,
						"acc_norm_stderr,none": 0.008484573530118583,
						"acc_stderr,none": 0.008484573530118583,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660013,
						"acc_stderr,none": 0.013394902889660013,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426111,
						"acc_stderr,none": 0.006125072776426111,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416054,
						"acc_stderr,none": 0.010811655372416054,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910608,
						"acc_stderr,none": 0.004319451082910608,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4764306073229555,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.039805756990886364,
						"alias": "kobest",
						"f1,none": 0.4021326241581926,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5071225071225072,
						"acc_stderr,none": 0.013347413060911655,
						"alias": " - kobest_boolq",
						"f1,none": 0.4443203967898094,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.493,
						"acc_stderr,none": 0.015817749561843578,
						"alias": " - kobest_copa",
						"f1,none": 0.4923781632499812,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.338,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.022242244375731027,
						"acc_stderr,none": 0.021175665695209407,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.33643809850112993,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4609571788413098,
						"acc_stderr,none": 0.02504922719608602,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3381683908762308,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4888888888888889,
						"acc_stderr,none": 0.014088017407699532,
						"alias": " - kobest_wic",
						"f1,none": 0.3297230142969956,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5747137589753541,
						"acc_stderr,none": 0.007010810636420737,
						"alias": "lambada",
						"perplexity,none": 6.958047049706398,
						"perplexity_stderr,none": 0.2647536497791857
					},
					"lambada_cloze": {
						"acc,none": 0.1674752571317679,
						"acc_stderr,none": 0.011609439689518901,
						"alias": "lambada_cloze",
						"perplexity,none": 185.66676218079564,
						"perplexity_stderr,none": 23.734859625696718
					},
					"lambada_multilingual": {
						"acc,none": 0.38490199883562976,
						"acc_stderr,none": 0.07608898792977997,
						"alias": "lambada_multilingual",
						"perplexity,none": 131.45396740665825,
						"perplexity_stderr,none": 95.28024178884175
					},
					"lambada_openai": {
						"acc,none": 0.5736464195614205,
						"acc_stderr,none": 0.006889999234952318,
						"alias": " - lambada_openai",
						"perplexity,none": 6.581986537574138,
						"perplexity_stderr,none": 0.17477792177799384
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.18823986027556763,
						"acc_stderr,none": 0.005446051323237008,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 139.8473073449582,
						"perplexity_stderr,none": 4.660459022368114
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.23015718998641568,
						"acc_stderr,none": 0.0058644241714399855,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 370.91952810475857,
						"perplexity_stderr,none": 24.98299339282566
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5717058024451775,
						"acc_stderr,none": 0.0068939712541951454,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.583236525584539,
						"perplexity_stderr,none": 0.17481189179976453
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.36638851154667185,
						"acc_stderr,none": 0.0067126579546010565,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 51.02874715706533,
						"perplexity_stderr,none": 2.6341920857292744
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4513875412381137,
						"acc_stderr,none": 0.0069329758883686235,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 29.56217917543056,
						"perplexity_stderr,none": 1.5411073949753211
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.30487094896176986,
						"acc_stderr,none": 0.006413613926848421,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 199.1761460704524,
						"perplexity_stderr,none": 13.648756866456297
					},
					"lambada_standard": {
						"acc,none": 0.5773335920822822,
						"acc_stderr,none": 0.006882153471156971,
						"alias": " - lambada_standard",
						"perplexity,none": 7.329823658540487,
						"perplexity_stderr,none": 0.20118710090523517
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.14671065398796818,
						"acc_stderr,none": 0.004929365951015958,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 231.48621701663308,
						"perplexity_stderr,none": 7.427086926691848
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.26908396946564883,
						"exact_match_stderr,get-answer": 0.01118895594325501
					},
					"logiqa": {
						"acc,none": 0.19969278033794163,
						"acc_norm,none": 0.282642089093702,
						"acc_norm_stderr,none": 0.017661585370360625,
						"acc_stderr,none": 0.015680245966420592,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22709923664122136,
						"acc_norm,none": 0.27353689567430023,
						"acc_norm_stderr,none": 0.011246739746251145,
						"acc_stderr,none": 0.010570161254615025,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2489112227805695,
						"acc_norm,none": 0.2552763819095477,
						"acc_norm_stderr,none": 0.007981848348968281,
						"acc_stderr,none": 0.007915319798861365,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.36623596695615335,
						"acc_stderr,none": 0.004958328450343275,
						"alias": "mc_taco",
						"f1,none": 0.5067589845037916,
						"f1_stderr,none": 0.005560980455529145
					},
					"medmcqa": {
						"acc,none": 0.2522113315802056,
						"acc_norm,none": 0.2522113315802056,
						"acc_norm_stderr,none": 0.006715521180494484,
						"acc_stderr,none": 0.006715521180494484,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2333071484681854,
						"acc_norm,none": 0.2333071484681854,
						"acc_norm_stderr,none": 0.011858538671340648,
						"acc_stderr,none": 0.011858538671340648,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2624270047001852,
						"acc_stderr,none": 0.040309744416101086,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.037125378336148665,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.29605263157894735,
						"acc_stderr,none": 0.037150621549989056,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2830188679245283,
						"acc_stderr,none": 0.027724236492700907,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2708333333333333,
						"acc_stderr,none": 0.03716177437566016,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.03345036916788992,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.19607843137254902,
						"acc_stderr,none": 0.03950581861179964,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2851063829787234,
						"acc_stderr,none": 0.029513196625539345,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.0414243971948936,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2482758620689655,
						"acc_stderr,none": 0.036001056927277716,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21164021164021163,
						"acc_stderr,none": 0.02103733150526289,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848877,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2806451612903226,
						"acc_stderr,none": 0.025560604721022884,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.03108982600293752,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.1696969696969697,
						"acc_stderr,none": 0.02931118867498312,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.30303030303030304,
						"acc_stderr,none": 0.032742879140268674,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.31088082901554404,
						"acc_stderr,none": 0.03340361906276585,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.29743589743589743,
						"acc_stderr,none": 0.02317740813146594,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.28991596638655465,
						"acc_stderr,none": 0.029472485833136084,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.03658603262763743,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27522935779816515,
						"acc_stderr,none": 0.019149093743155196,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.030225226160012393,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.02977177522814563,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.21940928270042195,
						"acc_stderr,none": 0.026939106581553945,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.21524663677130046,
						"acc_stderr,none": 0.027584066602208253,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22900763358778625,
						"acc_stderr,none": 0.036853466317118506,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2590860786397449,
						"acc_stderr,none": 0.04181966524292461,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4132231404958678,
						"acc_stderr,none": 0.04495087843548408,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.03602814176392644,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22085889570552147,
						"acc_stderr,none": 0.032591773927421776,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.04432804055291519,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2815533980582524,
						"acc_stderr,none": 0.044532548363264673,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.27350427350427353,
						"acc_stderr,none": 0.029202540153431187,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2554278416347382,
						"acc_stderr,none": 0.015594955384455765,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.023618678310069363,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.25921787709497207,
						"acc_stderr,none": 0.014655780837497736,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.026090162504279035,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25941422594142255,
						"acc_stderr,none": 0.03678390469844913,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2540192926045016,
						"acc_stderr,none": 0.02472386150477169,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2345679012345679,
						"acc_stderr,none": 0.023576881744005716,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.025892151156709405,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2803129074315515,
						"acc_stderr,none": 0.011471555944958616,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.27205882352941174,
						"acc_stderr,none": 0.027033041151681456,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.24019607843137256,
						"acc_stderr,none": 0.017282760695167418,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.04389311454644287,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2897959183673469,
						"acc_stderr,none": 0.029043088683304345,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2713682157946051,
						"acc_stderr,none": 0.03672232511502534,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.21890547263681592,
						"acc_stderr,none": 0.029239174636647,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2616555661274976,
						"acc_stderr,none": 0.04402844485531509,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.1927710843373494,
						"acc_stderr,none": 0.03070982405056527,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.033773102522091945,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.356698930208864,
						"acc_stderr,none": 0.00483542902895954,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35343775427176566,
						"acc_stderr,none": 0.004821284862489386,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.02304833666842021,
						"alias": "mrpc",
						"f1,none": 0.8122270742358079,
						"f1_stderr,none": 0.016218335300780515
					},
					"multimedqa": {
						"acc,none": 0.27977288857345634,
						"acc_norm,none": 0.2453669891865909,
						"acc_norm_stderr,none": 8.89879687730782e-05,
						"acc_stderr,none": 0.09463051472444227,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6718397308687594,
						"mrr_stderr,none": 0.010418310386991586,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42437923250564336,
						"r@2_stderr,none": 0.01661397885056347
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6261286702234105,
						"mrr_stderr,none": 0.01037646656613771,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.016757741478801026
					},
					"openbookqa": {
						"acc,none": 0.252,
						"acc_norm,none": 0.354,
						"acc_norm_stderr,none": 0.021407582047916447,
						"acc_stderr,none": 0.01943572728224953,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.5175,
						"acc_stderr,none": 0.011176284251254179,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4145,
						"acc_stderr,none": 0.011018419931591767,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.437,
						"acc_stderr,none": 0.011094009127418984,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5435,
						"acc_stderr,none": 0.011140733053371404,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5575,
						"acc_stderr,none": 0.01110894141174761,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456285,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.01115875256825067,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5078571428571429,
						"acc_stderr,none": 0.03988534011535243,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7312295973884657,
						"acc_norm,none": 0.7366702937976061,
						"acc_norm_stderr,none": 0.010276185322196766,
						"acc_stderr,none": 0.010343392940090013,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26414389410760036,
						"acc_norm,none": 0.30572160546541416,
						"acc_norm_stderr,none": 0.0033659142084052703,
						"acc_stderr,none": 0.0032209937955336347,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.021407582047916447,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7187471051029983,
						"acc_norm,none": 0.5022687591886176,
						"acc_norm_stderr,none": 0.004176892732703178,
						"acc_stderr,none": 0.14816415249964396,
						"alias": "pythia",
						"bits_per_byte,none": 0.756782382110838,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.689717870485216,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.581986537574138,
						"perplexity_stderr,none": 0.17477792177799384,
						"word_perplexity,none": 16.52818534984857,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.35106382978723405,
						"acc_norm,none": 0.3971631205673759,
						"acc_norm_stderr,none": 0.04753935337666359,
						"acc_stderr,none": 0.040881997002059974,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.425,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.04583492485141056,
						"acc_stderr,none": 0.04531634835874827,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.33125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.03732598513993525,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.33098591549295775,
						"acc_norm,none": 0.34507042253521125,
						"acc_norm_stderr,none": 0.028259075656935143,
						"acc_stderr,none": 0.02797236390054683,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.510342302764049,
						"acc_stderr,none": 0.006763963096653709,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4187979223348998,
						"acc_stderr,none": 0.002453687998232456,
						"alias": "qqp",
						"f1,none": 0.45218445470228935,
						"f1_stderr,none": 0.0029847338660045123
					},
					"race": {
						"acc,none": 0.36555023923444974,
						"acc_stderr,none": 0.01490465424718231,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5451263537906137,
						"acc_stderr,none": 0.029973636495415252,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.904,
						"acc_norm,none": 0.846,
						"acc_norm_stderr,none": 0.01141991306509869,
						"acc_stderr,none": 0.009320454434783222,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5451263537906137,
						"acc_stderr,none": 0.029973636495415252,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.4908256880733945,
						"acc_stderr,none": 0.016939001525351532,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5026991902429271,
						"acc_norm,none": 0.6822953114065781,
						"acc_norm_stderr,none": 0.003291764275270711,
						"acc_stderr,none": 0.003535040535935041,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5447406076336895,
						"acc_stderr,none": 0.02856906246199001,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5004006410256411,
						"acc_stderr,none": 0.005004253819939726,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6129522651261782,
						"acc_stderr,none": 0.0049037129142015645,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5221568627450981,
						"acc_stderr,none": 0.004946116891508721,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.33411149075668534,
						"acc_stderr,none": 0.052722485787617,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.19583843329253367,
						"bleu_acc_stderr,none": 0.00019299723203193512,
						"bleu_diff,none": -1.8214976706312949,
						"bleu_diff_stderr,none": 0.13865372230348294,
						"bleu_max,none": 6.433425824915947,
						"bleu_max_stderr,none": 0.16590804599147632,
						"rouge1_acc,none": 0.20563035495716034,
						"rouge1_acc_stderr,none": 0.00020017954911440305,
						"rouge1_diff,none": -3.2308274257746885,
						"rouge1_diff_stderr,none": 0.25517163532724385,
						"rouge1_max,none": 17.045475467984797,
						"rouge1_max_stderr,none": 0.6024886029445912,
						"rouge2_acc,none": 0.09179926560587515,
						"rouge2_acc_stderr,none": 0.00010217176524521813,
						"rouge2_diff,none": -3.6813286748262195,
						"rouge2_diff_stderr,none": 0.32889411116487677,
						"rouge2_max,none": 9.423257042100992,
						"rouge2_max_stderr,none": 0.4018387405742677,
						"rougeL_acc,none": 0.21052631578947367,
						"rougeL_acc_stderr,none": 0.00020368258106566656,
						"rougeL_diff,none": -3.154937419393459,
						"rougeL_diff_stderr,none": 0.25808985785927113,
						"rougeL_max,none": 16.178992153016562,
						"rougeL_max_stderr,none": 0.5560450337526428
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.19583843329253367,
						"bleu_acc_stderr,none": 0.01389234436774208,
						"bleu_diff,none": -1.8214976706312949,
						"bleu_diff_stderr,none": 0.37236235349922653,
						"bleu_max,none": 6.433425824915947,
						"bleu_max_stderr,none": 0.40731811399872253,
						"rouge1_acc,none": 0.20563035495716034,
						"rouge1_acc_stderr,none": 0.014148482219460964,
						"rouge1_diff,none": -3.2308274257746885,
						"rouge1_diff_stderr,none": 0.5051451626287674,
						"rouge1_max,none": 17.045475467984797,
						"rouge1_max_stderr,none": 0.7762013932895194,
						"rouge2_acc,none": 0.09179926560587515,
						"rouge2_acc_stderr,none": 0.0101080050081714,
						"rouge2_diff,none": -3.6813286748262195,
						"rouge2_diff_stderr,none": 0.5734929041974947,
						"rouge2_max,none": 9.423257042100992,
						"rouge2_max_stderr,none": 0.6339075173669009,
						"rougeL_acc,none": 0.21052631578947367,
						"rougeL_acc_stderr,none": 0.014271740645964197,
						"rougeL_diff,none": -3.154937419393459,
						"rougeL_diff_stderr,none": 0.5080254500113859,
						"rougeL_max,none": 16.178992153016562,
						"rougeL_max_stderr,none": 0.7456842721639252
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22643818849449204,
						"acc_stderr,none": 0.014651337324602592,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.387948141887782,
						"acc_stderr,none": 0.014007029789874701,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.021161417322834646,
						"exact_match_stderr,none": 0.0031935443667984284
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.756782382110838,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.689717870485216,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 16.52818534984857,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6385161799526441,
						"acc_stderr,none": 0.013502479670791294,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4084507042253521,
						"acc_stderr,none": 0.05875113694257524,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7728937728937729,
						"acc_stderr,none": 0.02540329042459515,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5709090909090908,
						"acc_stderr,none": 0.06135942275478038,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.020475118092988978,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.02200091089387719,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.02018670369357085,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.41204819277108434,
						"acc_stderr,none": 0.051535476594892576,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956477,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3779116465863454,
						"acc_stderr,none": 0.009718712281227459,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.41365461847389556,
						"acc_stderr,none": 0.009871502159099368,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3650602409638554,
						"acc_stderr,none": 0.009650194822749637,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5261044176706827,
						"acc_stderr,none": 0.01000840465166064,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4879518072289157,
						"acc_stderr,none": 0.010019162857624487,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.478714859437751,
						"acc_stderr,none": 0.010012987604500423,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4666666666666667,
						"acc_stderr,none": 0.00999977679318763,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43253012048192774,
						"acc_stderr,none": 0.009930409027139453,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3855421686746988,
						"acc_stderr,none": 0.009755949341224318,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3437751004016064,
						"acc_stderr,none": 0.009520310502882936,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495757,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.42289156626506025,
						"acc_stderr,none": 0.009902179034797438,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.44497991967871486,
						"acc_stderr,none": 0.009961210239024633,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3449799196787149,
						"acc_stderr,none": 0.009528219800053311,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5927441188857469,
						"acc_stderr,none": 0.05262352730974911,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.01266464832921408,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7081403044341495,
						"acc_stderr,none": 0.01169925603764938,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6598279285241562,
						"acc_stderr,none": 0.012192034998028832,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.57114493712773,
						"acc_stderr,none": 0.012736202713147777,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.012580772976133262,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6419589675711449,
						"acc_stderr,none": 0.012337624883487575,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48378557246856385,
						"acc_stderr,none": 0.012860357805055867,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5268034414295168,
						"acc_stderr,none": 0.012848623899505765,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5413633355393779,
						"acc_stderr,none": 0.012823020340169822,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5744540039708802,
						"acc_stderr,none": 0.012723670419166326,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6194573130377233,
						"acc_stderr,none": 0.012494500786685344,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7442121825129242,
						"acc_stderr,none": 0.06414679137553342,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8219354838709677,
						"acc_stderr,none": 0.007935777723887321,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6037539103232534,
						"acc_stderr,none": 0.015802642616557255,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7680608365019012,
						"acc_stderr,none": 0.026075593860304693,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5714285714285714,
						"acc_stderr,none": 0.02792722339076032,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7559523809523809,
						"acc_stderr,none": 0.01915139944664688,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "bigscience/bloom-7b1"
	},
	"bigscience/bloomz-7b1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.62119503945885,
						"acc_norm,none": 0.6319052987598647,
						"acc_norm_stderr,none": 0.04681021500911714,
						"acc_stderr,none": 0.052087030348446986,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.395625,
						"acc_stderr,none": 0.016247235240295803,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.03605,
						"acc_stderr,none": 0.03797688372091277,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8312985074626865,
						"acc_stderr,none": 0.16015233757543007,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.4234769687964339,
						"acc_norm,none": 0.4234769687964339,
						"acc_norm_stderr,none": 0.1397281048036323,
						"acc_stderr,none": 0.1397281048036323,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.4344672768088413,
						"acc_norm,none": 0.4344672768088413,
						"acc_norm_stderr,none": 0.10802201062229827,
						"acc_stderr,none": 0.10802201062229827,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373919200954083,
						"likelihood_diff_stderr,none": 0.5335663163108918,
						"pct_stereotype,none": 0.5891472868217054,
						"pct_stereotype_stderr,none": 0.06158194126773133
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.31151574803149606,
						"exact_match_stderr,none": 0.010276188141417548
					},
					"glue": {
						"acc,none": 0.7951516473597499,
						"acc_stderr,none": 0.09583486267801569,
						"alias": "glue",
						"f1,none": 0.8453922670055515,
						"f1_stderr,none": 8.761722669965732e-06,
						"mcc,none": 0.023156201795729782,
						"mcc_stderr,none": 0.0007343167152396528
					},
					"kmmlu": {
						"acc,none": 0.2823563384348831,
						"acc_norm,none": 0.2823563384348831,
						"acc_norm_stderr,none": 0.028448743947617098,
						"acc_stderr,none": 0.028448743947617098,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48366586274939705,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.0004974669338677383,
						"acc_stderr,none": 0.03938026463756901,
						"alias": "kobest",
						"f1,none": 0.4008933058010713,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5701533087521832,
						"acc_stderr,none": 0.008839314734836581,
						"alias": "lambada",
						"perplexity,none": 6.623310101983335,
						"perplexity_stderr,none": 0.19128433346243753
					},
					"lambada_cloze": {
						"acc,none": 0.6759169415874248,
						"acc_stderr,none": 0.006531807354538854,
						"alias": "lambada_cloze",
						"perplexity,none": 5.658961758948961,
						"perplexity_stderr,none": 0.19475118431635088
					},
					"lambada_multilingual": {
						"acc,none": 0.387036677663497,
						"acc_stderr,none": 0.07631456241301143,
						"alias": "lambada_multilingual",
						"perplexity,none": 150.45866292395567,
						"perplexity_stderr,none": 118.53766910601625
					},
					"mmlu": {
						"acc,none": 0.4417461900014243,
						"acc_stderr,none": 0.09699659607988127,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.39723698193411267,
						"acc_stderr,none": 0.10549318923264882,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.5062761506276151,
						"acc_stderr,none": 0.08848608393891341,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4972375690607735,
						"acc_stderr,none": 0.0746346040442146,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3904218204884237,
						"acc_stderr,none": 0.07881578196696179,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4049680624556423,
						"acc_norm,none": 0.36820801304882445,
						"acc_norm_stderr,none": 0.00010185773823408252,
						"acc_stderr,none": 0.08075676032306833,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.42342857142857143,
						"acc_stderr,none": 0.03819682957619193,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.744149137687076,
						"acc_norm,none": 0.6359749476631048,
						"acc_norm_stderr,none": 0.004534576525321305,
						"acc_stderr,none": 0.1583908227549397,
						"alias": "pythia",
						"bits_per_byte,none": 0.8019798256179487,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74349209823089,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.7286977691619585,
						"perplexity_stderr,none": 0.19227626749068627,
						"word_perplexity,none": 19.542559580463433,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.44148936170212766,
						"acc_norm,none": 0.4804964539007092,
						"acc_norm_stderr,none": 0.0477431072752235,
						"acc_stderr,none": 0.042071189561705316,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7620045921932714,
						"acc_stderr,none": 0.07602240133716677,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3870427130403125,
						"acc_stderr,none": 0.06374189400054577,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.627906976744186,
						"bleu_acc_stderr,none": 0.00028632329080902595,
						"bleu_diff,none": 9.561125832469621,
						"bleu_diff_stderr,none": 0.31338849821976683,
						"bleu_max,none": 13.075759790828645,
						"bleu_max_stderr,none": 0.31515951040802814,
						"rouge1_acc,none": 0.627906976744186,
						"rouge1_acc_stderr,none": 0.00028632329080902606,
						"rouge1_diff,none": 23.369111546760966,
						"rouge1_diff_stderr,none": 1.7299219279298712,
						"rouge1_max,none": 36.85094253800928,
						"rouge1_max_stderr,none": 1.167303442882788,
						"rouge2_acc,none": 0.3317013463892289,
						"rouge2_acc_stderr,none": 0.00027166122940294464,
						"rouge2_diff,none": 18.887865546721432,
						"rouge2_diff_stderr,none": 1.6249848069665063,
						"rouge2_max,none": 22.848898191419927,
						"rouge2_max_stderr,none": 1.3939104530862922,
						"rougeL_acc,none": 0.6254589963280294,
						"rougeL_acc_stderr,none": 0.0002870833826475051,
						"rougeL_diff,none": 23.284293934721113,
						"rougeL_diff_stderr,none": 1.7306421906511726,
						"rougeL_max,none": 36.285903242985505,
						"rougeL_max_stderr,none": 1.1682370161354918
					},
					"xcopa": {
						"acc,none": 0.5478181818181819,
						"acc_stderr,none": 0.038919558874417066,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3783132530120482,
						"acc_stderr,none": 0.03808286230184794,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5717465856446664,
						"acc_stderr,none": 0.05944752341985826,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7345470892335356,
						"acc_stderr,none": 0.07425083824995865,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.62119503945885,
						"acc_norm,none": 0.6319052987598647,
						"acc_norm_stderr,none": 0.04681021500911714,
						"acc_stderr,none": 0.052087030348446986,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.395625,
						"acc_stderr,none": 0.016247235240295803,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.402,
						"acc_stderr,none": 0.015512467135715075,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.015372453034968524,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.40166666666666667,
						"acc_stderr,none": 0.014157771660055659,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40187713310580203,
						"acc_norm,none": 0.43600682593856654,
						"acc_norm_stderr,none": 0.014491225699230916,
						"acc_stderr,none": 0.01432726861457828,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7293771043771043,
						"acc_norm,none": 0.7285353535353535,
						"acc_norm_stderr,none": 0.009125362970360623,
						"acc_stderr,none": 0.009116466166403827,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.03605,
						"acc_stderr,none": 0.03797688372091277,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.064,
						"acc_stderr,none": 0.005474210764278864,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.072,
						"acc_stderr,none": 0.005781410931267408,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.055,
						"acc_stderr,none": 0.005099068566917282,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.1445,
						"acc_stderr,none": 0.007863891873474606,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.001992482118488464,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.015,
						"acc_stderr,none": 0.0027186753387999493,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339426,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339508,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"blimp": {
						"acc,none": 0.8312985074626865,
						"acc_stderr,none": 0.16015233757543007,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.818,
						"acc_stderr,none": 0.01220758063766215,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767632,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.01260773393417531,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248139,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307799,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.505,
						"acc_stderr,none": 0.015818508944436656,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481753,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336667,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403637,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.0044294039801783605,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745937,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.0058145342727349255,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295438,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323492,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.00768700787628642,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.0054835270646791945,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832031,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673723,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319332,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024963,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.262,
						"acc_stderr,none": 0.013912208651021342,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400238,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659986,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.691,
						"acc_stderr,none": 0.014619600977206488,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.01196721413755995,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998266,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639234,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333442,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118588,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.015222868840522022,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.01265543994336665,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996012,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.01159890229868901,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525061,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621243,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099177,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333445,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298284,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264602,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.01444273494157502,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.333,
						"acc_stderr,none": 0.014910846164229868,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380713,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364462,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.402,
						"acc_stderr,none": 0.015512467135715077,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318205,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436974,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.01074366913239734,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230192,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731979,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597491,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.00965801621852429,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118774,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.402,
						"acc_stderr,none": 0.015512467135715075,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.362,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.917737003058104,
						"acc_stderr,none": 0.004805669776357564,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.0569293902400011,
						"alias": "cb",
						"f1,none": 0.5330969267139479,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.4234769687964339,
						"acc_norm,none": 0.4234769687964339,
						"acc_norm_stderr,none": 0.1397281048036323,
						"acc_stderr,none": 0.1397281048036323,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.3673469387755102,
						"acc_norm,none": 0.3673469387755102,
						"acc_norm_stderr,none": 0.06958255967849923,
						"acc_stderr,none": 0.06958255967849923,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222173,
						"acc_stderr,none": 0.07872958216222173,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.46808510638297873,
						"acc_norm,none": 0.46808510638297873,
						"acc_norm_stderr,none": 0.07357064625618347,
						"acc_stderr,none": 0.07357064625618347,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.41818181818181815,
						"acc_norm,none": 0.41818181818181815,
						"acc_norm_stderr,none": 0.0671242332357016,
						"acc_stderr,none": 0.0671242332357016,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.4864864864864865,
						"acc_norm,none": 0.4864864864864865,
						"acc_norm_stderr,none": 0.08330289193201319,
						"acc_stderr,none": 0.08330289193201319,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755133,
						"acc_stderr,none": 0.08780518530755133,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.40540540540540543,
						"acc_norm,none": 0.40540540540540543,
						"acc_norm_stderr,none": 0.08182838794858087,
						"acc_stderr,none": 0.08182838794858087,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.4838709677419355,
						"acc_norm,none": 0.4838709677419355,
						"acc_norm_stderr,none": 0.09123958466923197,
						"acc_stderr,none": 0.09123958466923197,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.55,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.12051692101036454,
						"acc_stderr,none": 0.12051692101036454,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.6842105263157895,
						"acc_norm,none": 0.6842105263157895,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002614,
						"acc_stderr,none": 0.07770873402002614,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280736,
						"acc_stderr,none": 0.10154334054280736,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5416666666666666,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.6190476190476191,
						"acc_norm,none": 0.6190476190476191,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5833333333333334,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.07142857142857147,
						"acc_stderr,none": 0.07142857142857147,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5681818181818182,
						"acc_norm,none": 0.5681818181818182,
						"acc_norm_stderr,none": 0.07553702921752882,
						"acc_stderr,none": 0.07553702921752882,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.07446511639805872,
						"acc_stderr,none": 0.07446511639805872,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.5217391304347826,
						"acc_norm,none": 0.5217391304347826,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.4344672768088413,
						"acc_norm,none": 0.4344672768088413,
						"acc_norm_stderr,none": 0.10802201062229827,
						"acc_stderr,none": 0.10802201062229827,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.4378698224852071,
						"acc_norm,none": 0.4378698224852071,
						"acc_norm_stderr,none": 0.0382768611753937,
						"acc_stderr,none": 0.0382768611753937,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.037698374558241474,
						"acc_stderr,none": 0.037698374558241474,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.6,
						"acc_norm,none": 0.6,
						"acc_norm_stderr,none": 0.03885143449429052,
						"acc_stderr,none": 0.03885143449429052,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.03825460278380026,
						"acc_stderr,none": 0.03825460278380026,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.03462071128843533,
						"acc_stderr,none": 0.03462071128843533,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.03839344480212195,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.648854961832061,
						"acc_norm,none": 0.648854961832061,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.47058823529411764,
						"acc_norm,none": 0.47058823529411764,
						"acc_norm_stderr,none": 0.04295863196118949,
						"acc_stderr,none": 0.04295863196118949,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5514018691588785,
						"acc_norm,none": 0.5514018691588785,
						"acc_norm_stderr,none": 0.04830698295619322,
						"acc_stderr,none": 0.04830698295619322,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.544891640866873,
						"acc_norm,none": 0.544891640866873,
						"acc_norm_stderr,none": 0.027751377369447577,
						"acc_stderr,none": 0.027751377369447577,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.033321399446680854,
						"acc_stderr,none": 0.033321399446680854,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.553072625698324,
						"acc_norm,none": 0.553072625698324,
						"acc_norm_stderr,none": 0.03726486555057904,
						"acc_stderr,none": 0.03726486555057904,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.379746835443038,
						"acc_norm,none": 0.379746835443038,
						"acc_norm_stderr,none": 0.031591887529658504,
						"acc_stderr,none": 0.031591887529658504,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936712,
						"acc_stderr,none": 0.04022559246936712,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.5981308411214953,
						"acc_norm,none": 0.5981308411214953,
						"acc_norm_stderr,none": 0.04761979313593577,
						"acc_stderr,none": 0.04761979313593577,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4056603773584906,
						"acc_norm,none": 0.4056603773584906,
						"acc_norm_stderr,none": 0.04791858528000114,
						"acc_stderr,none": 0.04791858528000114,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.3425925925925926,
						"acc_norm,none": 0.3425925925925926,
						"acc_norm_stderr,none": 0.045879047413018105,
						"acc_stderr,none": 0.045879047413018105,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.13333333333333333,
						"acc_norm,none": 0.13333333333333333,
						"acc_norm_stderr,none": 0.03333333333333332,
						"acc_stderr,none": 0.03333333333333332,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.36792452830188677,
						"acc_norm,none": 0.36792452830188677,
						"acc_norm_stderr,none": 0.04706187110761455,
						"acc_stderr,none": 0.04706187110761455,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.39194139194139194,
						"acc_norm,none": 0.39194139194139194,
						"acc_norm_stderr,none": 0.029600485465541292,
						"acc_stderr,none": 0.029600485465541292,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.45588235294117646,
						"acc_norm,none": 0.45588235294117646,
						"acc_norm_stderr,none": 0.03495624522015473,
						"acc_stderr,none": 0.03495624522015473,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.5146198830409356,
						"acc_norm,none": 0.5146198830409356,
						"acc_norm_stderr,none": 0.03833185275213025,
						"acc_stderr,none": 0.03833185275213025,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.40816326530612246,
						"acc_norm,none": 0.40816326530612246,
						"acc_norm_stderr,none": 0.04067630441457486,
						"acc_stderr,none": 0.04067630441457486,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.37410071942446044,
						"acc_norm,none": 0.37410071942446044,
						"acc_norm_stderr,none": 0.04119143810981593,
						"acc_stderr,none": 0.04119143810981593,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.44025157232704404,
						"acc_norm,none": 0.44025157232704404,
						"acc_norm_stderr,none": 0.03949283907134624,
						"acc_stderr,none": 0.03949283907134624,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5398773006134969,
						"acc_norm,none": 0.5398773006134969,
						"acc_norm_stderr,none": 0.03915857291436972,
						"acc_stderr,none": 0.03915857291436972,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4186046511627907,
						"acc_norm,none": 0.4186046511627907,
						"acc_norm_stderr,none": 0.03772591189087504,
						"acc_stderr,none": 0.03772591189087504,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.3373015873015873,
						"acc_norm,none": 0.3373015873015873,
						"acc_norm_stderr,none": 0.029842162912104356,
						"acc_stderr,none": 0.029842162912104356,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.4494949494949495,
						"acc_norm,none": 0.4494949494949495,
						"acc_norm_stderr,none": 0.0354413249194797,
						"acc_stderr,none": 0.0354413249194797,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.592436974789916,
						"acc_norm,none": 0.592436974789916,
						"acc_norm_stderr,none": 0.031918633744784645,
						"acc_stderr,none": 0.031918633744784645,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633736,
						"acc_stderr,none": 0.029614094221633736,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.04232073695151589,
						"acc_stderr,none": 0.04232073695151589,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.46853146853146854,
						"acc_norm,none": 0.46853146853146854,
						"acc_norm_stderr,none": 0.04187588397445898,
						"acc_stderr,none": 0.04187588397445898,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3693181818181818,
						"acc_norm,none": 0.3693181818181818,
						"acc_norm_stderr,none": 0.03648265829887591,
						"acc_stderr,none": 0.03648265829887591,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.47651006711409394,
						"acc_norm,none": 0.47651006711409394,
						"acc_norm_stderr,none": 0.041054365986755055,
						"acc_stderr,none": 0.041054365986755055,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.0325059328741737,
						"acc_stderr,none": 0.0325059328741737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.32575757575757575,
						"acc_norm,none": 0.32575757575757575,
						"acc_norm_stderr,none": 0.04094677028657698,
						"acc_stderr,none": 0.04094677028657698,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.4576271186440678,
						"acc_norm,none": 0.4576271186440678,
						"acc_norm_stderr,none": 0.046058726812661495,
						"acc_stderr,none": 0.046058726812661495,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566342,
						"acc_stderr,none": 0.03273897454566342,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.33636363636363636,
						"acc_norm,none": 0.33636363636363636,
						"acc_norm_stderr,none": 0.04525393596302506,
						"acc_stderr,none": 0.04525393596302506,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3986013986013986,
						"acc_norm,none": 0.3986013986013986,
						"acc_norm_stderr,none": 0.04108719032366424,
						"acc_stderr,none": 0.04108719032366424,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.4365079365079365,
						"acc_norm,none": 0.4365079365079365,
						"acc_norm_stderr,none": 0.04435932892851466,
						"acc_stderr,none": 0.04435932892851466,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3891891891891892,
						"acc_norm,none": 0.3891891891891892,
						"acc_norm_stderr,none": 0.03594386960243732,
						"acc_stderr,none": 0.03594386960243732,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.47093023255813954,
						"acc_norm,none": 0.47093023255813954,
						"acc_norm_stderr,none": 0.0381712782490057,
						"acc_stderr,none": 0.0381712782490057,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.43552311435523117,
						"acc_norm,none": 0.43552311435523117,
						"acc_norm_stderr,none": 0.024487066333143778,
						"acc_stderr,none": 0.024487066333143778,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7523364485981309,
						"acc_norm,none": 0.7523364485981309,
						"acc_norm_stderr,none": 0.029576535293164476,
						"acc_stderr,none": 0.029576535293164476,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3821138211382114,
						"acc_norm,none": 0.3821138211382114,
						"acc_norm_stderr,none": 0.043991695270045095,
						"acc_stderr,none": 0.043991695270045095,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671826,
						"acc_stderr,none": 0.041118866352671826,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5285714285714286,
						"acc_norm,none": 0.5285714285714286,
						"acc_norm_stderr,none": 0.03452921053595503,
						"acc_stderr,none": 0.03452921053595503,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5111111111111111,
						"acc_norm,none": 0.5111111111111111,
						"acc_norm_stderr,none": 0.03736252590436864,
						"acc_stderr,none": 0.03736252590436864,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5502645502645502,
						"acc_norm,none": 0.5502645502645502,
						"acc_norm_stderr,none": 0.03628151418016977,
						"acc_stderr,none": 0.03628151418016977,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.33620689655172414,
						"acc_norm,none": 0.33620689655172414,
						"acc_norm_stderr,none": 0.04405251996570163,
						"acc_stderr,none": 0.04405251996570163,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.4689655172413793,
						"acc_norm,none": 0.4689655172413793,
						"acc_norm_stderr,none": 0.04158632762097828,
						"acc_stderr,none": 0.04158632762097828,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.4666666666666667,
						"acc_norm_stderr,none": 0.048919959047022794,
						"acc_stderr,none": 0.048919959047022794,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.5371428571428571,
						"acc_norm,none": 0.5371428571428571,
						"acc_norm_stderr,none": 0.03780017090541436,
						"acc_stderr,none": 0.03780017090541436,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.36018957345971564,
						"acc_norm,none": 0.36018957345971564,
						"acc_norm_stderr,none": 0.03312695957406923,
						"acc_stderr,none": 0.03312695957406923,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.024071720210605833,
						"acc_stderr,none": 0.024071720210605833,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.032878020318813504,
						"acc_stderr,none": 0.032878020318813504,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4367816091954023,
						"acc_norm,none": 0.4367816091954023,
						"acc_norm_stderr,none": 0.037709218684305705,
						"acc_stderr,none": 0.037709218684305705,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.5111111111111111,
						"acc_norm,none": 0.5111111111111111,
						"acc_norm_stderr,none": 0.043182754919779756,
						"acc_stderr,none": 0.043182754919779756,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.4911504424778761,
						"acc_norm,none": 0.4911504424778761,
						"acc_norm_stderr,none": 0.03332811194650095,
						"acc_stderr,none": 0.03332811194650095,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.46060606060606063,
						"acc_norm,none": 0.46060606060606063,
						"acc_norm_stderr,none": 0.03892207016552013,
						"acc_stderr,none": 0.03892207016552013,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.41621621621621624,
						"acc_norm,none": 0.41621621621621624,
						"acc_norm_stderr,none": 0.03633930360945236,
						"acc_stderr,none": 0.03633930360945236,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5029585798816568,
						"acc_norm,none": 0.5029585798816568,
						"acc_norm_stderr,none": 0.038575162160962455,
						"acc_stderr,none": 0.038575162160962455,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.03949109157518469,
						"acc_stderr,none": 0.03949109157518469,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.5625,
						"acc_norm,none": 0.5625,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.0393415738622931,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.023156201795729782,
						"mcc_stderr,none": 0.027098278824302713
					},
					"copa": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.03942772444036622,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6373919200954083,
						"likelihood_diff_stderr,none": 0.5335663163108918,
						"pct_stereotype,none": 0.5891472868217054,
						"pct_stereotype_stderr,none": 0.06158194126773133
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.469066785927251,
						"likelihood_diff_stderr,none": 0.08683323973034045,
						"pct_stereotype,none": 0.6046511627906976,
						"pct_stereotype_stderr,none": 0.011942786593874368
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.431318681318681,
						"likelihood_diff_stderr,none": 0.35299857868223483,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.05128205128205124
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.625,
						"likelihood_diff_stderr,none": 2.088999869446012,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.15212000482437738
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.176923076923077,
						"likelihood_diff_stderr,none": 0.6897629513057967,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.529296875,
						"likelihood_diff_stderr,none": 0.18415055521677498,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.027429019252949587
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3703703703703702,
						"likelihood_diff_stderr,none": 0.22194222688609458,
						"pct_stereotype,none": 0.5601851851851852,
						"pct_stereotype_stderr,none": 0.03385177976044811
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.845486111111111,
						"likelihood_diff_stderr,none": 0.33058481222282504,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.356299212598425,
						"likelihood_diff_stderr,none": 0.14806602271635755,
						"pct_stereotype,none": 0.547244094488189,
						"pct_stereotype_stderr,none": 0.022106430541228055
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.30518018018018,
						"likelihood_diff_stderr,none": 0.3451121369819556,
						"pct_stereotype,none": 0.6126126126126126,
						"pct_stereotype_stderr,none": 0.046448250723550785
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.323924731182796,
						"likelihood_diff_stderr,none": 0.43342681038647696,
						"pct_stereotype,none": 0.6989247311827957,
						"pct_stereotype_stderr,none": 0.047825424305926206
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.899342105263158,
						"likelihood_diff_stderr,none": 0.2184030443955797,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.80664877757901,
						"likelihood_diff_stderr,none": 0.11287324267967352,
						"pct_stereotype,none": 0.5730471079308289,
						"pct_stereotype_stderr,none": 0.01208225883409121
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.397222222222222,
						"likelihood_diff_stderr,none": 0.5123878100224671,
						"pct_stereotype,none": 0.4111111111111111,
						"pct_stereotype_stderr,none": 0.052155640611075554
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.3846153846153846,
						"likelihood_diff_stderr,none": 0.6869113940775836,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.916666666666667,
						"likelihood_diff_stderr,none": 0.7871404779898772,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.0584705346204686
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.2955607476635516,
						"likelihood_diff_stderr,none": 0.18288649966253517,
						"pct_stereotype,none": 0.6137071651090342,
						"pct_stereotype_stderr,none": 0.027218484103343366
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.8982213438735176,
						"likelihood_diff_stderr,none": 0.3797221959962351,
						"pct_stereotype,none": 0.4308300395256917,
						"pct_stereotype_stderr,none": 0.03119418930984328
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.472222222222222,
						"likelihood_diff_stderr,none": 0.598066082318113,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0274456521739133,
						"likelihood_diff_stderr,none": 0.16396079440849806,
						"pct_stereotype,none": 0.46956521739130436,
						"pct_stereotype_stderr,none": 0.023294726417873605
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.025,
						"likelihood_diff_stderr,none": 0.530865135191617,
						"pct_stereotype,none": 0.7391304347826086,
						"pct_stereotype_stderr,none": 0.04112631751856163
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.016483516483516,
						"likelihood_diff_stderr,none": 0.30106019922301186,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.04108446855035883
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.990433673469388,
						"likelihood_diff_stderr,none": 0.37798710652418877,
						"pct_stereotype,none": 0.7091836734693877,
						"pct_stereotype_stderr,none": 0.03252156607969807
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.31151574803149606,
						"exact_match_stderr,none": 0.010276188141417548
					},
					"glue": {
						"acc,none": 0.7951516473597499,
						"acc_stderr,none": 0.09583486267801569,
						"alias": "glue",
						"f1,none": 0.8453922670055515,
						"f1_stderr,none": 8.761722669965732e-06,
						"mcc,none": 0.023156201795729782,
						"mcc_stderr,none": 0.0007343167152396528
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"hellaswag": {
						"acc,none": 0.4753037243576977,
						"acc_norm,none": 0.6400119498107947,
						"acc_norm_stderr,none": 0.004790155370993448,
						"acc_stderr,none": 0.004983691099110916,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2823563384348831,
						"acc_norm,none": 0.2823563384348831,
						"acc_norm_stderr,none": 0.028448743947617098,
						"acc_stderr,none": 0.028448743947617098,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.33,
						"acc_norm,none": 0.33,
						"acc_norm_stderr,none": 0.047258156262526045,
						"acc_stderr,none": 0.047258156262526045,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206494,
						"acc_stderr,none": 0.014619600977206494,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134706,
						"acc_stderr,none": 0.014470846741134706,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986187,
						"acc_stderr,none": 0.014062601350986187,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259701,
						"acc_stderr,none": 0.013929286594259701,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.265,
						"acc_norm,none": 0.265,
						"acc_norm_stderr,none": 0.018032386001530096,
						"acc_stderr,none": 0.018032386001530096,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234202,
						"acc_stderr,none": 0.013807775152234202,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.33,
						"acc_norm,none": 0.33,
						"acc_norm_stderr,none": 0.014876872027456727,
						"acc_stderr,none": 0.014876872027456727,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291354,
						"acc_stderr,none": 0.014236526215291354,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102135,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.03939825345266469,
						"acc_stderr,none": 0.03939825345266469,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.284,
						"acc_norm,none": 0.284,
						"acc_norm_stderr,none": 0.01426700906103131,
						"acc_stderr,none": 0.01426700906103131,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509015,
						"acc_stderr,none": 0.014658474370509015,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515438,
						"acc_stderr,none": 0.013531522534515438,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445509,
						"acc_stderr,none": 0.014428554438445509,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145162,
						"acc_stderr,none": 0.013979965645145162,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.014221154708434946,
						"acc_stderr,none": 0.014221154708434946,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259719,
						"acc_stderr,none": 0.013929286594259719,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768078,
						"acc_stderr,none": 0.04408440022768078,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485265,
						"acc_stderr,none": 0.014174516461485265,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.344,
						"acc_norm,none": 0.344,
						"acc_norm_stderr,none": 0.015029633724408943,
						"acc_stderr,none": 0.015029633724408943,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633916,
						"acc_stderr,none": 0.014046255632633916,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445514,
						"acc_stderr,none": 0.014428554438445514,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986187,
						"acc_stderr,none": 0.014062601350986187,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.01806848202433441,
						"acc_stderr,none": 0.01806848202433441,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905696,
						"acc_stderr,none": 0.014356395999905696,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.01407885699246262,
						"acc_stderr,none": 0.01407885699246262,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459541,
						"acc_stderr,none": 0.014526080235459541,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.01414298497574066,
						"acc_stderr,none": 0.01414298497574066,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.0348735088019777,
						"acc_stderr,none": 0.0348735088019777,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.02445997952351143,
						"acc_stderr,none": 0.02445997952351143,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.01397996564514516,
						"acc_stderr,none": 0.01397996564514516,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905689,
						"acc_stderr,none": 0.014356395999905689,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134713,
						"acc_stderr,none": 0.014470846741134713,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.03109395714370027,
						"acc_stderr,none": 0.03109395714370027,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.01429714686251791,
						"acc_stderr,none": 0.01429714686251791,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.01437099598237794,
						"acc_stderr,none": 0.01437099598237794,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.315,
						"acc_norm,none": 0.315,
						"acc_norm_stderr,none": 0.03292865746446489,
						"acc_stderr,none": 0.03292865746446489,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.351,
						"acc_norm,none": 0.351,
						"acc_norm_stderr,none": 0.015100563798316403,
						"acc_stderr,none": 0.015100563798316403,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48366586274939705,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.0004974669338677383,
						"acc_stderr,none": 0.03938026463756901,
						"alias": "kobest",
						"f1,none": 0.4008933058010713,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5270655270655271,
						"acc_stderr,none": 0.01332919606517512,
						"alias": " - kobest_boolq",
						"f1,none": 0.41249899169153825,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.015819173374302706,
						"alias": " - kobest_copa",
						"f1,none": 0.49693551555090576,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.35,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.02230396677426996,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3482344495607286,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.44836272040302266,
						"acc_stderr,none": 0.024991594109841586,
						"alias": " - kobest_sentineg",
						"f1,none": 0.41559960476699404,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5701533087521832,
						"acc_stderr,none": 0.008839314734836581,
						"alias": "lambada",
						"perplexity,none": 6.623310101983335,
						"perplexity_stderr,none": 0.19128433346243753
					},
					"lambada_cloze": {
						"acc,none": 0.6759169415874248,
						"acc_stderr,none": 0.006531807354538854,
						"alias": "lambada_cloze",
						"perplexity,none": 5.658961758948961,
						"perplexity_stderr,none": 0.19475118431635088
					},
					"lambada_multilingual": {
						"acc,none": 0.387036677663497,
						"acc_stderr,none": 0.07631456241301143,
						"alias": "lambada_multilingual",
						"perplexity,none": 150.45866292395567,
						"perplexity_stderr,none": 118.53766910601625
					},
					"lambada_openai": {
						"acc,none": 0.5594799146128469,
						"acc_stderr,none": 0.00691651272281676,
						"alias": " - lambada_openai",
						"perplexity,none": 6.7286977691619585,
						"perplexity_stderr,none": 0.19227626749068627
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.676693188433922,
						"acc_stderr,none": 0.00651651504970713,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 5.869470309735436,
						"perplexity_stderr,none": 0.18001120832692027
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.22239472152144382,
						"acc_stderr,none": 0.005793672076818077,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 462.1750389429538,
						"perplexity_stderr,none": 32.54998393935206
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5585096060547254,
						"acc_stderr,none": 0.006918118960619813,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.727662785027122,
						"perplexity_stderr,none": 0.1923091089698425
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4125751989132544,
						"acc_stderr,none": 0.006858667841807087,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 40.543535528207826,
						"perplexity_stderr,none": 2.161073425284866
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.43741509800116435,
						"acc_stderr,none": 0.006911192566731795,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 32.41646185707111,
						"perplexity_stderr,none": 1.7849302638425804
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.30428876382689696,
						"acc_stderr,none": 0.006410169885207214,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 210.4306155065184,
						"perplexity_stderr,none": 14.738535774454979
					},
					"lambada_standard": {
						"acc,none": 0.5812148263147681,
						"acc_stderr,none": 0.006873470354770503,
						"alias": " - lambada_standard",
						"perplexity,none": 6.5213071592542295,
						"perplexity_stderr,none": 0.1762722757381793
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.6751406947409276,
						"acc_stderr,none": 0.006524644766835839,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 5.448453208162487,
						"perplexity_stderr,none": 0.14593885241873825
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3594147582697201,
						"exact_match_stderr,get-answer": 0.012105934643855325
					},
					"logiqa": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.01769054268019077,
						"acc_stderr,none": 0.016705867034419633,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2633587786259542,
						"acc_norm,none": 0.3059796437659033,
						"acc_norm_stderr,none": 0.011626373730234275,
						"acc_stderr,none": 0.011112552251145356,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2726968174204355,
						"acc_norm,none": 0.26767169179229483,
						"acc_norm_stderr,none": 0.008105031808599677,
						"acc_stderr,none": 0.008152641950549709,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6801525100614276,
						"acc_stderr,none": 0.004800269573673502,
						"alias": "mc_taco",
						"f1,none": 0.12514484356894554,
						"f1_stderr,none": 0.007716305051013618
					},
					"medmcqa": {
						"acc,none": 0.3679177623715037,
						"acc_norm,none": 0.3679177623715037,
						"acc_norm_stderr,none": 0.007457103253239377,
						"acc_stderr,none": 0.007457103253239377,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3699921445404556,
						"acc_norm,none": 0.3699921445404556,
						"acc_norm_stderr,none": 0.013537101287089803,
						"acc_stderr,none": 0.013537101287089803,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4417461900014243,
						"acc_stderr,none": 0.09699659607988127,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.42962962962962964,
						"acc_stderr,none": 0.04276349494376599,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4342105263157895,
						"acc_stderr,none": 0.040335656678483205,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4981132075471698,
						"acc_stderr,none": 0.030772653642075657,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4583333333333333,
						"acc_stderr,none": 0.04166666666666665,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.03714325906302065,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.04533838195929776,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.0498887651569859,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.031489558297455304,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.35964912280701755,
						"acc_stderr,none": 0.04514496132873633,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.43448275862068964,
						"acc_stderr,none": 0.04130740879555497,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.291005291005291,
						"acc_stderr,none": 0.023393826500484865,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.041634530313028585,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.5387096774193548,
						"acc_stderr,none": 0.028358634859836935,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.35467980295566504,
						"acc_stderr,none": 0.033661244890514495,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956913,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.6121212121212121,
						"acc_stderr,none": 0.038049136539710114,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.03540294377095367,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5440414507772021,
						"acc_stderr,none": 0.035944137112724366,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4307692307692308,
						"acc_stderr,none": 0.02510682066053975,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.028317533496066482,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.47058823529411764,
						"acc_stderr,none": 0.03242225027115006,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.32450331125827814,
						"acc_stderr,none": 0.038227469376587525,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.6422018348623854,
						"acc_stderr,none": 0.020552060784827825,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.37962962962962965,
						"acc_stderr,none": 0.03309682581119035,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5490196078431373,
						"acc_stderr,none": 0.034924061041636124,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6624472573839663,
						"acc_stderr,none": 0.030781549102026202,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.5291479820627802,
						"acc_stderr,none": 0.03350073248773403,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.366412213740458,
						"acc_stderr,none": 0.04225875451969638,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.39723698193411267,
						"acc_stderr,none": 0.10549318923264882,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.5702479338842975,
						"acc_stderr,none": 0.04519082021319773,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.04820403072760627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.5214723926380368,
						"acc_stderr,none": 0.03924746876751129,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.39285714285714285,
						"acc_stderr,none": 0.04635550135609976,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6116504854368932,
						"acc_stderr,none": 0.0482572933735639,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7136752136752137,
						"acc_stderr,none": 0.02961432369045666,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5874840357598978,
						"acc_stderr,none": 0.01760414910867193,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.43641618497109824,
						"acc_stderr,none": 0.026700545424943687,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23910614525139665,
						"acc_stderr,none": 0.01426555419233115,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4869281045751634,
						"acc_stderr,none": 0.028620130800700246,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.5062761506276151,
						"acc_stderr,none": 0.08848608393891341,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4790996784565916,
						"acc_stderr,none": 0.028373270961069414,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.0277012284685426,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.36524822695035464,
						"acc_stderr,none": 0.028723863853281278,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3318122555410691,
						"acc_stderr,none": 0.012026088259897632,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.41911764705882354,
						"acc_stderr,none": 0.029972807170464626,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.41830065359477125,
						"acc_stderr,none": 0.019955975145835546,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.5181818181818182,
						"acc_stderr,none": 0.04785964010794917,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.46530612244897956,
						"acc_stderr,none": 0.03193207024425314,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4972375690607735,
						"acc_stderr,none": 0.0746346040442146,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.582089552238806,
						"acc_stderr,none": 0.03487558640462064,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3904218204884237,
						"acc_stderr,none": 0.07881578196696179,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.03777798822748017,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4502923976608187,
						"acc_stderr,none": 0.038158273659132366,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.617524197656648,
						"acc_stderr,none": 0.004905756019203571,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.6193043124491456,
						"acc_stderr,none": 0.004897136197694763,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7818627450980392,
						"acc_stderr,none": 0.020470713360036894,
						"alias": "mrpc",
						"f1,none": 0.8585055643879174,
						"f1_stderr,none": 0.014852441904858696
					},
					"multimedqa": {
						"acc,none": 0.4049680624556423,
						"acc_norm,none": 0.36820801304882445,
						"acc_norm_stderr,none": 0.00010185773823408252,
						"acc_stderr,none": 0.08075676032306833,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.11262376237623763,
						"acc_stderr,none": 0.004540800309287676,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6861361942966689,
						"mrr_stderr,none": 0.010342826328408555,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42663656884875845,
						"r@2_stderr,none": 0.016625411323052963
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6476674207715364,
						"mrr_stderr,none": 0.010437148305972723,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4762979683972912,
						"r@2_stderr,none": 0.016788421275515525
					},
					"openbookqa": {
						"acc,none": 0.318,
						"acc_norm,none": 0.426,
						"acc_norm_stderr,none": 0.022136577335085634,
						"acc_stderr,none": 0.020847571620814007,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.01095719079029897,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3505,
						"acc_stderr,none": 0.01067154233969731,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.432,
						"acc_stderr,none": 0.011079231683079109,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.3915,
						"acc_stderr,none": 0.010916659824821179,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.011175886999478619,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5025,
						"acc_stderr,none": 0.011182996230990784,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4055,
						"acc_stderr,none": 0.010981583336946122,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.42342857142857143,
						"acc_stderr,none": 0.03819682957619193,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7611534276387377,
						"acc_norm,none": 0.7752992383025027,
						"acc_norm_stderr,none": 0.009738282586548377,
						"acc_stderr,none": 0.009948120385337494,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.3458048676345004,
						"acc_norm,none": 0.34249573014517504,
						"acc_norm_stderr,none": 0.003466969498106131,
						"acc_stderr,none": 0.0034749003641766007,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.020629569998345396,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.744149137687076,
						"acc_norm,none": 0.6359749476631048,
						"acc_norm_stderr,none": 0.004534576525321305,
						"acc_stderr,none": 0.1583908227549397,
						"alias": "pythia",
						"bits_per_byte,none": 0.8019798256179487,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74349209823089,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.7286977691619585,
						"perplexity_stderr,none": 0.19227626749068627,
						"word_perplexity,none": 19.542559580463433,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.44148936170212766,
						"acc_norm,none": 0.4804964539007092,
						"acc_norm_stderr,none": 0.0477431072752235,
						"acc_stderr,none": 0.042071189561705316,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5166666666666667,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.045809453927047654,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.43125,
						"acc_norm,none": 0.49375,
						"acc_norm_stderr,none": 0.03964948130713095,
						"acc_stderr,none": 0.03927594984018917,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4154929577464789,
						"acc_norm,none": 0.4295774647887324,
						"acc_norm_stderr,none": 0.029425636435375824,
						"acc_stderr,none": 0.02929432462367856,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.7907742998352554,
						"acc_stderr,none": 0.005503740390451179,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.8761810536730151,
						"acc_stderr,none": 0.001638113469100588,
						"alias": "qqp",
						"f1,none": 0.8449770841075189,
						"f1_stderr,none": 0.002165490395037004
					},
					"race": {
						"acc,none": 0.4574162679425837,
						"acc_stderr,none": 0.015418386849171706,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.8303249097472925,
						"acc_stderr,none": 0.022593241101707042,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.966,
						"acc_norm,none": 0.959,
						"acc_norm_stderr,none": 0.006273624021118796,
						"acc_stderr,none": 0.005733836139695461,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.8303249097472925,
						"acc_stderr,none": 0.022593241101707042,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9461009174311926,
						"acc_stderr,none": 0.007651566213462467,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5087473757872638,
						"acc_norm,none": 0.6917924622613216,
						"acc_norm_stderr,none": 0.0032646773257822125,
						"acc_stderr,none": 0.0035345510193116277,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7620045921932714,
						"acc_stderr,none": 0.07602240133716677,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.7895633012820513,
						"acc_stderr,none": 0.0040796572872179825,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9313874531265836,
						"acc_stderr,none": 0.0025450505120239294,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5711764705882353,
						"acc_stderr,none": 0.004900559447029817,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3870427130403125,
						"acc_stderr,none": 0.06374189400054577,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.627906976744186,
						"bleu_acc_stderr,none": 0.00028632329080902595,
						"bleu_diff,none": 9.561125832469621,
						"bleu_diff_stderr,none": 0.31338849821976683,
						"bleu_max,none": 13.075759790828645,
						"bleu_max_stderr,none": 0.31515951040802814,
						"rouge1_acc,none": 0.627906976744186,
						"rouge1_acc_stderr,none": 0.00028632329080902606,
						"rouge1_diff,none": 23.369111546760966,
						"rouge1_diff_stderr,none": 1.7299219279298712,
						"rouge1_max,none": 36.85094253800928,
						"rouge1_max_stderr,none": 1.167303442882788,
						"rouge2_acc,none": 0.3317013463892289,
						"rouge2_acc_stderr,none": 0.00027166122940294464,
						"rouge2_diff,none": 18.887865546721432,
						"rouge2_diff_stderr,none": 1.6249848069665063,
						"rouge2_max,none": 22.848898191419927,
						"rouge2_max_stderr,none": 1.3939104530862922,
						"rougeL_acc,none": 0.6254589963280294,
						"rougeL_acc_stderr,none": 0.0002870833826475051,
						"rougeL_diff,none": 23.284293934721113,
						"rougeL_diff_stderr,none": 1.7306421906511726,
						"rougeL_max,none": 36.285903242985505,
						"rougeL_max_stderr,none": 1.1682370161354918
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.627906976744186,
						"bleu_acc_stderr,none": 0.016921090118814035,
						"bleu_diff,none": 9.561125832469621,
						"bleu_diff_stderr,none": 0.5598111272739823,
						"bleu_max,none": 13.075759790828645,
						"bleu_max_stderr,none": 0.5613906931968397,
						"rouge1_acc,none": 0.627906976744186,
						"rouge1_acc_stderr,none": 0.016921090118814038,
						"rouge1_diff,none": 23.369111546760966,
						"rouge1_diff_stderr,none": 1.3152649649138652,
						"rouge1_max,none": 36.85094253800928,
						"rouge1_max_stderr,none": 1.0804181796336028,
						"rouge2_acc,none": 0.3317013463892289,
						"rouge2_acc_stderr,none": 0.01648214881024148,
						"rouge2_diff,none": 18.887865546721432,
						"rouge2_diff_stderr,none": 1.2747489191862476,
						"rouge2_max,none": 22.848898191419927,
						"rouge2_max_stderr,none": 1.1806398490167491,
						"rougeL_acc,none": 0.6254589963280294,
						"rougeL_acc_stderr,none": 0.01694353512840532,
						"rougeL_diff,none": 23.284293934721113,
						"rougeL_diff_stderr,none": 1.3155387454009755,
						"rougeL_max,none": 36.285903242985505,
						"rougeL_max_stderr,none": 1.0808501358354412
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2558139534883721,
						"acc_stderr,none": 0.015274176219283368,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.45265709281628275,
						"acc_stderr,none": 0.015341593860194673,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.31151574803149606,
						"exact_match_stderr,none": 0.010276188141417548
					},
					"wic": {
						"acc,none": 0.768025078369906,
						"acc_stderr,none": 0.016723936228019928,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.8019798256179487,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74349209823089,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 19.542559580463433,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6558800315706393,
						"acc_stderr,none": 0.013352121905005935,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.6056338028169014,
						"acc_stderr,none": 0.058412510854444266,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6057692307692307,
						"acc_stderr,none": 0.04815154775990712,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7875457875457875,
						"acc_stderr,none": 0.02480196713503145,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5478181818181819,
						"acc_stderr,none": 0.038919558874417066,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.022318338119870523,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689257,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.02237429816635319,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.021983962090086333,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.622,
						"acc_stderr,none": 0.02170655082451818,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3783132530120482,
						"acc_stderr,none": 0.03808286230184794,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3293172690763052,
						"acc_stderr,none": 0.009420053435910403,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3421686746987952,
						"acc_stderr,none": 0.009509659143015627,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42409638554216866,
						"acc_stderr,none": 0.009905918244994484,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3602409638554217,
						"acc_stderr,none": 0.009622597362374079,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409704,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138145,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894266,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41285140562248995,
						"acc_stderr,none": 0.009868665943084408,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.37028112449799194,
						"acc_stderr,none": 0.00967891540984029,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778582,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3465863453815261,
						"acc_stderr,none": 0.009538660220458994,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956485,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39397590361445783,
						"acc_stderr,none": 0.00979416301490676,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.36626506024096384,
						"acc_stderr,none": 0.009656930886014761,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.37309236947791163,
						"acc_stderr,none": 0.00969387718343044,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5717465856446664,
						"acc_stderr,none": 0.05944752341985826,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5658504301786896,
						"acc_stderr,none": 0.012755046289912223,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7299801455989411,
						"acc_stderr,none": 0.011425228637713692,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6452680344142951,
						"acc_stderr,none": 0.012312089524603852,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.012862387586650079,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5784248841826605,
						"acc_stderr,none": 0.012707862131801905,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.0127938745267302,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4877564526803441,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5215089344804765,
						"acc_stderr,none": 0.012855214257296608,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5195234943745863,
						"acc_stderr,none": 0.012857312531836848,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5810721376571807,
						"acc_stderr,none": 0.012696855440486899,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5929847782925215,
						"acc_stderr,none": 0.012642664836816924,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7345470892335356,
						"acc_stderr,none": 0.07425083824995865,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8331182795698925,
						"acc_stderr,none": 0.007734631973316416,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109369,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5808133472367049,
						"acc_stderr,none": 0.01594186996770523,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6730038022813688,
						"acc_stderr,none": 0.028982074243683254,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5396825396825397,
						"acc_stderr,none": 0.02812762633423857,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7261904761904762,
						"acc_stderr,none": 0.019882251217620307,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "bigscience/bloomz-7b1"
	},
	"bigscience/bloomz-7b1-mt": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6279594137542277,
						"acc_norm_stderr,none": 0.044784386195247695,
						"acc_stderr,none": 0.05207762210680191,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3996875,
						"acc_stderr,none": 0.01691541719309478,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0366,
						"acc_stderr,none": 0.040574155236561465,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.1559002025244433,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.4086181277860327,
						"acc_norm,none": 0.4086181277860327,
						"acc_norm_stderr,none": 0.14403866314205602,
						"acc_stderr,none": 0.14403866314205602,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.435503367294077,
						"acc_norm,none": 0.435503367294077,
						"acc_norm_stderr,none": 0.10746884259500308,
						"acc_stderr,none": 0.10746884259500308,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.8384950805008944,
						"likelihood_diff_stderr,none": 0.594207754328924,
						"pct_stereotype,none": 0.5913834227787717,
						"pct_stereotype_stderr,none": 0.06528996286533582
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.3031496062992126,
						"exact_match_stderr,none": 0.010198664785988206
					},
					"glue": {
						"acc,none": 0.801542851606087,
						"acc_stderr,none": 0.09924730396109512,
						"alias": "glue",
						"f1,none": 0.85712412474566,
						"f1_stderr,none": 7.441467137764442e-06,
						"mcc,none": 0.03589254563226399,
						"mcc_stderr,none": 0.0001454315140425739
					},
					"kmmlu": {
						"acc,none": 0.277331793242853,
						"acc_norm,none": 0.277331793242853,
						"acc_norm_stderr,none": 0.02768556763224144,
						"acc_stderr,none": 0.02768556763224144,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.03957258033866218,
						"alias": "kobest",
						"f1,none": 0.40584567134752647,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5673394139336309,
						"acc_stderr,none": 0.0078019799241583355,
						"alias": "lambada",
						"perplexity,none": 6.569091645893632,
						"perplexity_stderr,none": 0.18527322557911466
					},
					"lambada_cloze": {
						"acc,none": 0.6734911701921211,
						"acc_stderr,none": 0.0066274245138747,
						"alias": "lambada_cloze",
						"perplexity,none": 5.771733035039074,
						"perplexity_stderr,none": 0.18955966680759215
					},
					"lambada_multilingual": {
						"acc,none": 0.3825344459538133,
						"acc_stderr,none": 0.07782847016255834,
						"alias": "lambada_multilingual",
						"perplexity,none": 159.92860998449697,
						"perplexity_stderr,none": 126.21127109702662
					},
					"mmlu": {
						"acc,none": 0.43996581683520863,
						"acc_stderr,none": 0.09985373223622389,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.393836344314559,
						"acc_stderr,none": 0.11068831355914426,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.5043450273575797,
						"acc_stderr,none": 0.08747228668961055,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49951251218719533,
						"acc_stderr,none": 0.08013036553983305,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3872502378686965,
						"acc_stderr,none": 0.07785493894311979,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4063875088715401,
						"acc_norm,none": 0.37056429084523906,
						"acc_norm_stderr,none": 0.00010241543372288454,
						"acc_stderr,none": 0.07686893603849891,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.44164285714285717,
						"acc_stderr,none": 0.03360326465165433,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7457031171656568,
						"acc_norm,none": 0.6322163804783708,
						"acc_norm_stderr,none": 0.0043889885293719125,
						"acc_stderr,none": 0.15599252416803436,
						"alias": "pythia",
						"bits_per_byte,none": 0.8013693061718576,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74275444362767,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.619105438644448,
						"perplexity_stderr,none": 0.1897348843226533,
						"word_perplexity,none": 19.49838607196728,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4379432624113475,
						"acc_norm,none": 0.4787234042553192,
						"acc_norm_stderr,none": 0.04936844264079171,
						"acc_stderr,none": 0.04260013524670006,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7910219293867092,
						"acc_stderr,none": 0.07311353664556729,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3933979946240904,
						"acc_stderr,none": 0.062167434079411894,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.7037943696450428,
						"bleu_acc_stderr,none": 0.00025547531237864933,
						"bleu_diff,none": 17.725051646302095,
						"bleu_diff_stderr,none": 0.4690767892279991,
						"bleu_max,none": 21.05783221282457,
						"bleu_max_stderr,none": 0.5018198156702414,
						"rouge1_acc,none": 0.7441860465116279,
						"rouge1_acc_stderr,none": 0.00023330045917772135,
						"rouge1_diff,none": 40.374563354976026,
						"rouge1_diff_stderr,none": 2.0957641410966774,
						"rouge1_max,none": 49.87215207168784,
						"rouge1_max_stderr,none": 1.5554072752397459,
						"rouge2_acc,none": 0.4700122399020808,
						"rouge2_acc_stderr,none": 0.0003052705076523413,
						"rouge2_diff,none": 36.50545880041624,
						"rouge2_diff_stderr,none": 2.2860834352706494,
						"rouge2_max,none": 38.4969887926062,
						"rouge2_max_stderr,none": 2.0849267673981364,
						"rougeL_acc,none": 0.7405140758873929,
						"rougeL_acc_stderr,none": 0.00023548159227945327,
						"rougeL_diff,none": 40.26038709184385,
						"rougeL_diff_stderr,none": 2.102396565924826,
						"rougeL_max,none": 49.32430704016434,
						"rougeL_max_stderr,none": 1.5785700559414388
					},
					"xcopa": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.038320767835798686,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.37627844712182057,
						"acc_stderr,none": 0.03592781598574345,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.574153179712412,
						"acc_stderr,none": 0.06115222376695438,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7271296920656327,
						"acc_stderr,none": 0.07765926605467739,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6245772266065389,
						"acc_norm,none": 0.6279594137542277,
						"acc_norm_stderr,none": 0.044784386195247695,
						"acc_stderr,none": 0.05207762210680191,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3996875,
						"acc_stderr,none": 0.01691541719309478,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.417,
						"acc_stderr,none": 0.015599819048769618,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.386,
						"acc_stderr,none": 0.015402637476784376,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.39666666666666667,
						"acc_stderr,none": 0.014128040196184492,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4052901023890785,
						"acc_norm,none": 0.44112627986348124,
						"acc_norm_stderr,none": 0.014509747749064664,
						"acc_stderr,none": 0.014346869060229332,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7327441077441077,
						"acc_norm,none": 0.7201178451178452,
						"acc_norm_stderr,none": 0.009212077524656533,
						"acc_stderr,none": 0.009080463246017469,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0366,
						"acc_stderr,none": 0.040574155236561465,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0505,
						"acc_stderr,none": 0.0048976390673687465,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0735,
						"acc_stderr,none": 0.005836600719081102,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.056,
						"acc_stderr,none": 0.005142491867889049,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.159,
						"acc_stderr,none": 0.008178810822683121,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.019,
						"acc_stderr,none": 0.0030535490958949058,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521453,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339462,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"blimp": {
						"acc,none": 0.8336865671641791,
						"acc_stderr,none": 0.1559002025244433,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.01180043432464459,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469393,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767632,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655142,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037183,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696235,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.015786868759359002,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.01417451646148525,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747401,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565734,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139999,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337067,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280309,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315151,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323499,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410048,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425658,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319334,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475298,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796396,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024975,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298345,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.258,
						"acc_stderr,none": 0.013842963108656603,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745908,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175306,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543143,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719102,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689096,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817136,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697598,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.015050266127564443,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617324,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.521,
						"acc_stderr,none": 0.015805341148131296,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103761,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559564,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337064,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333315,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.00953361892934097,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929341,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655123,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142667,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731975,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481734,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406146,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651532,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.003583830889403636,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577973,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.015749255189977596,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491108,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910624,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296198,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.01162816469672721,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973433,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.01215515313551195,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400246,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397222,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427417,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.412,
						"acc_stderr,none": 0.015572363292015097,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.01532510550889813,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.901223241590214,
						"acc_stderr,none": 0.005218384987776313,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.8214285714285714,
						"acc_stderr,none": 0.05164277182008721,
						"alias": "cb",
						"f1,none": 0.571563088512241,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.4086181277860327,
						"acc_norm,none": 0.4086181277860327,
						"acc_norm_stderr,none": 0.14403866314205602,
						"acc_stderr,none": 0.14403866314205602,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.425531914893617,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.07289875413448858,
						"acc_stderr,none": 0.07289875413448858,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.41818181818181815,
						"acc_norm,none": 0.41818181818181815,
						"acc_norm_stderr,none": 0.0671242332357016,
						"acc_stderr,none": 0.0671242332357016,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.4594594594594595,
						"acc_norm,none": 0.4594594594594595,
						"acc_norm_stderr,none": 0.08305895907471073,
						"acc_stderr,none": 0.08305895907471073,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755133,
						"acc_stderr,none": 0.08780518530755133,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.5517241379310345,
						"acc_norm,none": 0.5517241379310345,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595455,
						"acc_stderr,none": 0.08534681648595455,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.65,
						"acc_norm,none": 0.65,
						"acc_norm_stderr,none": 0.10942433098048308,
						"acc_stderr,none": 0.10942433098048308,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.05763033956734371,
						"acc_stderr,none": 0.05763033956734371,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5833333333333334,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.5416666666666666,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.6190476190476191,
						"acc_norm,none": 0.6190476190476191,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.47619047619047616,
						"acc_norm,none": 0.47619047619047616,
						"acc_norm_stderr,none": 0.11167656571008164,
						"acc_stderr,none": 0.11167656571008164,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.4489795918367347,
						"acc_norm,none": 0.4489795918367347,
						"acc_norm_stderr,none": 0.07179207795648103,
						"acc_stderr,none": 0.07179207795648103,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629555,
						"acc_stderr,none": 0.10497277621629555,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3673469387755102,
						"acc_norm,none": 0.3673469387755102,
						"acc_norm_stderr,none": 0.06958255967849926,
						"acc_stderr,none": 0.06958255967849926,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.45652173913043476,
						"acc_norm,none": 0.45652173913043476,
						"acc_norm_stderr,none": 0.0742532664199971,
						"acc_stderr,none": 0.0742532664199971,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.10568965974008646,
						"acc_stderr,none": 0.10568965974008646,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.435503367294077,
						"acc_norm,none": 0.435503367294077,
						"acc_norm_stderr,none": 0.10746884259500308,
						"acc_stderr,none": 0.10746884259500308,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.4319526627218935,
						"acc_norm,none": 0.4319526627218935,
						"acc_norm_stderr,none": 0.03821692157382018,
						"acc_stderr,none": 0.03821692157382018,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.30405405405405406,
						"acc_norm,none": 0.30405405405405406,
						"acc_norm_stderr,none": 0.03794062549620373,
						"acc_stderr,none": 0.03794062549620373,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.58125,
						"acc_norm,none": 0.58125,
						"acc_norm_stderr,none": 0.039125538756915115,
						"acc_stderr,none": 0.039125538756915115,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3575757575757576,
						"acc_norm,none": 0.3575757575757576,
						"acc_norm_stderr,none": 0.03742597043806586,
						"acc_stderr,none": 0.03742597043806586,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.5023923444976076,
						"acc_norm,none": 0.5023923444976076,
						"acc_norm_stderr,none": 0.03466836542150577,
						"acc_stderr,none": 0.03466836542150577,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.36875,
						"acc_norm,none": 0.36875,
						"acc_norm_stderr,none": 0.03826204233503226,
						"acc_stderr,none": 0.03826204233503226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.6641221374045801,
						"acc_norm,none": 0.6641221374045801,
						"acc_norm_stderr,none": 0.04142313771996664,
						"acc_stderr,none": 0.04142313771996664,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.47058823529411764,
						"acc_norm,none": 0.47058823529411764,
						"acc_norm_stderr,none": 0.04295863196118949,
						"acc_stderr,none": 0.04295863196118949,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5233644859813084,
						"acc_norm,none": 0.5233644859813084,
						"acc_norm_stderr,none": 0.048511241723296745,
						"acc_stderr,none": 0.048511241723296745,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.5386996904024768,
						"acc_norm,none": 0.5386996904024768,
						"acc_norm_stderr,none": 0.02778032359002163,
						"acc_stderr,none": 0.02778032359002163,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.03283472056108567,
						"acc_stderr,none": 0.03283472056108567,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.4972067039106145,
						"acc_norm,none": 0.4972067039106145,
						"acc_norm_stderr,none": 0.03747599962290149,
						"acc_stderr,none": 0.03747599962290149,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.35443037974683544,
						"acc_norm,none": 0.35443037974683544,
						"acc_norm_stderr,none": 0.031137304297185798,
						"acc_stderr,none": 0.031137304297185798,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763312,
						"acc_stderr,none": 0.04252016223763312,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.6074766355140186,
						"acc_norm,none": 0.6074766355140186,
						"acc_norm_stderr,none": 0.04742907046004224,
						"acc_stderr,none": 0.04742907046004224,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.39622641509433965,
						"acc_norm,none": 0.39622641509433965,
						"acc_norm_stderr,none": 0.047732492983673595,
						"acc_stderr,none": 0.047732492983673595,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.04766075165356461,
						"acc_stderr,none": 0.04766075165356461,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604899,
						"acc_stderr,none": 0.04176466758604899,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.4139194139194139,
						"acc_norm,none": 0.4139194139194139,
						"acc_norm_stderr,none": 0.02986428448611705,
						"acc_stderr,none": 0.02986428448611705,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.45098039215686275,
						"acc_norm,none": 0.45098039215686275,
						"acc_norm_stderr,none": 0.03492406104163613,
						"acc_stderr,none": 0.03492406104163613,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.5146198830409356,
						"acc_norm,none": 0.5146198830409356,
						"acc_norm_stderr,none": 0.038331852752130254,
						"acc_stderr,none": 0.038331852752130254,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.04019027214807402,
						"acc_stderr,none": 0.04019027214807402,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.3237410071942446,
						"acc_norm,none": 0.3237410071942446,
						"acc_norm_stderr,none": 0.039830507521054596,
						"acc_stderr,none": 0.039830507521054596,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.44025157232704404,
						"acc_norm,none": 0.44025157232704404,
						"acc_norm_stderr,none": 0.03949283907134624,
						"acc_stderr,none": 0.03949283907134624,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5950920245398773,
						"acc_norm,none": 0.5950920245398773,
						"acc_norm_stderr,none": 0.038566721635489125,
						"acc_stderr,none": 0.038566721635489125,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4476744186046512,
						"acc_norm,none": 0.4476744186046512,
						"acc_norm_stderr,none": 0.0380260016867221,
						"acc_stderr,none": 0.0380260016867221,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.3611111111111111,
						"acc_norm,none": 0.3611111111111111,
						"acc_norm_stderr,none": 0.030317698704780037,
						"acc_stderr,none": 0.030317698704780037,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.398989898989899,
						"acc_norm,none": 0.398989898989899,
						"acc_norm_stderr,none": 0.034889016168527305,
						"acc_stderr,none": 0.034889016168527305,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.032145368597886394,
						"acc_stderr,none": 0.032145368597886394,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3851851851851852,
						"acc_norm,none": 0.3851851851851852,
						"acc_norm_stderr,none": 0.042039210401562783,
						"acc_stderr,none": 0.042039210401562783,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.5034965034965035,
						"acc_norm,none": 0.5034965034965035,
						"acc_norm_stderr,none": 0.04195804195804197,
						"acc_stderr,none": 0.04195804195804197,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.39204545454545453,
						"acc_norm,none": 0.39204545454545453,
						"acc_norm_stderr,none": 0.03690496026403126,
						"acc_stderr,none": 0.03690496026403126,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.4429530201342282,
						"acc_norm,none": 0.4429530201342282,
						"acc_norm_stderr,none": 0.04083136397892789,
						"acc_stderr,none": 0.04083136397892789,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.04141487016241484,
						"acc_stderr,none": 0.04141487016241484,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.4152542372881356,
						"acc_norm,none": 0.4152542372881356,
						"acc_norm_stderr,none": 0.045556216394221444,
						"acc_stderr,none": 0.045556216394221444,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.03242041613395384,
						"acc_stderr,none": 0.03242041613395384,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.04461272175910508,
						"acc_stderr,none": 0.04461272175910508,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.40559440559440557,
						"acc_norm,none": 0.40559440559440557,
						"acc_norm_stderr,none": 0.04120436731133787,
						"acc_stderr,none": 0.04120436731133787,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.04426266681379909,
						"acc_stderr,none": 0.04426266681379909,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.03611575592573069,
						"acc_stderr,none": 0.03611575592573069,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.4941860465116279,
						"acc_norm,none": 0.4941860465116279,
						"acc_norm_stderr,none": 0.03823337064994852,
						"acc_stderr,none": 0.03823337064994852,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.43795620437956206,
						"acc_norm,none": 0.43795620437956206,
						"acc_norm_stderr,none": 0.024502392487713895,
						"acc_stderr,none": 0.024502392487713895,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7616822429906542,
						"acc_norm,none": 0.7616822429906542,
						"acc_norm_stderr,none": 0.029192770642101563,
						"acc_stderr,none": 0.029192770642101563,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3902439024390244,
						"acc_norm,none": 0.3902439024390244,
						"acc_norm_stderr,none": 0.04416377855732609,
						"acc_stderr,none": 0.04416377855732609,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3442622950819672,
						"acc_norm,none": 0.3442622950819672,
						"acc_norm_stderr,none": 0.04319337331204006,
						"acc_stderr,none": 0.04319337331204006,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5476190476190477,
						"acc_norm,none": 0.5476190476190477,
						"acc_norm_stderr,none": 0.03442851454672489,
						"acc_stderr,none": 0.03442851454672489,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5111111111111111,
						"acc_norm,none": 0.5111111111111111,
						"acc_norm_stderr,none": 0.03736252590436864,
						"acc_stderr,none": 0.03736252590436864,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5661375661375662,
						"acc_norm,none": 0.5661375661375662,
						"acc_norm_stderr,none": 0.036145820389423475,
						"acc_stderr,none": 0.036145820389423475,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.0428025479250546,
						"acc_stderr,none": 0.0428025479250546,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.4896551724137931,
						"acc_norm,none": 0.4896551724137931,
						"acc_norm_stderr,none": 0.04165774775728763,
						"acc_stderr,none": 0.04165774775728763,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.5047619047619047,
						"acc_norm,none": 0.5047619047619047,
						"acc_norm_stderr,none": 0.049026810195176226,
						"acc_stderr,none": 0.049026810195176226,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.5085714285714286,
						"acc_norm,none": 0.5085714285714286,
						"acc_norm_stderr,none": 0.0378993320697706,
						"acc_stderr,none": 0.0378993320697706,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3412322274881517,
						"acc_norm,none": 0.3412322274881517,
						"acc_norm_stderr,none": 0.03271760807501987,
						"acc_stderr,none": 0.03271760807501987,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.023099237430720333,
						"acc_stderr,none": 0.023099237430720333,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.49137931034482757,
						"acc_norm,none": 0.49137931034482757,
						"acc_norm_stderr,none": 0.032892694731648096,
						"acc_stderr,none": 0.032892694731648096,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.5114942528735632,
						"acc_norm,none": 0.5114942528735632,
						"acc_norm_stderr,none": 0.03800425000198233,
						"acc_stderr,none": 0.03800425000198233,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.4888888888888889,
						"acc_norm,none": 0.4888888888888889,
						"acc_norm_stderr,none": 0.04318275491977976,
						"acc_stderr,none": 0.04318275491977976,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.5088495575221239,
						"acc_norm,none": 0.5088495575221239,
						"acc_norm_stderr,none": 0.03332811194650094,
						"acc_stderr,none": 0.03332811194650094,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.47878787878787876,
						"acc_norm,none": 0.47878787878787876,
						"acc_norm_stderr,none": 0.03900828913737301,
						"acc_stderr,none": 0.03900828913737301,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.036115755925730714,
						"acc_stderr,none": 0.036115755925730714,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.514792899408284,
						"acc_norm,none": 0.514792899408284,
						"acc_norm_stderr,none": 0.038558950703150026,
						"acc_stderr,none": 0.038558950703150026,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.484472049689441,
						"acc_norm,none": 0.484472049689441,
						"acc_norm_stderr,none": 0.03950940416210684,
						"acc_stderr,none": 0.03950940416210684,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.5375,
						"acc_norm,none": 0.5375,
						"acc_norm_stderr,none": 0.039540899134978144,
						"acc_stderr,none": 0.039540899134978144,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.03589254563226399,
						"mcc_stderr,none": 0.01205949891341153
					},
					"copa": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.8384950805008944,
						"likelihood_diff_stderr,none": 0.594207754328924,
						"pct_stereotype,none": 0.5913834227787717,
						"pct_stereotype_stderr,none": 0.06528996286533582
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4817382230172926,
						"likelihood_diff_stderr,none": 0.08668615039071322,
						"pct_stereotype,none": 0.6201550387596899,
						"pct_stereotype_stderr,none": 0.011855402851295495
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.5013736263736264,
						"likelihood_diff_stderr,none": 0.36130093310271394,
						"pct_stereotype,none": 0.6593406593406593,
						"pct_stereotype_stderr,none": 0.049956709512768704
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.056818181818182,
						"likelihood_diff_stderr,none": 2.108516526411107,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.15212000482437738
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.315384615384615,
						"likelihood_diff_stderr,none": 0.6921450797271714,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.05685286730420954
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.59921875,
						"likelihood_diff_stderr,none": 0.1796813825710064,
						"pct_stereotype,none": 0.634375,
						"pct_stereotype_stderr,none": 0.026964702306061943
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3489583333333335,
						"likelihood_diff_stderr,none": 0.22520329559296032,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8854166666666665,
						"likelihood_diff_stderr,none": 0.33708777279643,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.357529527559055,
						"likelihood_diff_stderr,none": 0.14793371382383189,
						"pct_stereotype,none": 0.5452755905511811,
						"pct_stereotype_stderr,none": 0.02211455387069532
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.2117117117117115,
						"likelihood_diff_stderr,none": 0.3299937364177086,
						"pct_stereotype,none": 0.6216216216216216,
						"pct_stereotype_stderr,none": 0.046241282338514815
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.192204301075269,
						"likelihood_diff_stderr,none": 0.4318071949535316,
						"pct_stereotype,none": 0.7096774193548387,
						"pct_stereotype_stderr,none": 0.04732351421824122
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.9210526315789473,
						"likelihood_diff_stderr,none": 0.22049724441495597,
						"pct_stereotype,none": 0.6947368421052632,
						"pct_stereotype_stderr,none": 0.03349781342677419
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.195326475849732,
						"likelihood_diff_stderr,none": 0.13447619034128624,
						"pct_stereotype,none": 0.5623136553369111,
						"pct_stereotype_stderr,none": 0.012118079757777041
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.536111111111111,
						"likelihood_diff_stderr,none": 0.5569727554362407,
						"pct_stereotype,none": 0.4,
						"pct_stereotype_stderr,none": 0.05192907868894985
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.855769230769231,
						"likelihood_diff_stderr,none": 0.6910378311088827,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 6.568181818181818,
						"likelihood_diff_stderr,none": 0.9895512606194363,
						"pct_stereotype,none": 0.6818181818181818,
						"pct_stereotype_stderr,none": 0.05777171902747657
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.801791277258567,
						"likelihood_diff_stderr,none": 0.23966032241963944,
						"pct_stereotype,none": 0.6105919003115264,
						"pct_stereotype_stderr,none": 0.027258566978193188
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.198122529644269,
						"likelihood_diff_stderr,none": 0.42508996555199846,
						"pct_stereotype,none": 0.4150197628458498,
						"pct_stereotype_stderr,none": 0.031038785215783234
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.493055555555555,
						"likelihood_diff_stderr,none": 0.590679808777988,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.05315633121839994
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.5078804347826087,
						"likelihood_diff_stderr,none": 0.2324696373172928,
						"pct_stereotype,none": 0.45869565217391306,
						"pct_stereotype_stderr,none": 0.023258233524708842
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.179347826086956,
						"likelihood_diff_stderr,none": 0.5689118426213547,
						"pct_stereotype,none": 0.7304347826086957,
						"pct_stereotype_stderr,none": 0.04155949138579951
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.728021978021978,
						"likelihood_diff_stderr,none": 0.45082339456129167,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.04284305206509432
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.235650510204081,
						"likelihood_diff_stderr,none": 0.41719272803617696,
						"pct_stereotype,none": 0.6887755102040817,
						"pct_stereotype_stderr,none": 0.03315571704943972
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.3031496062992126,
						"exact_match_stderr,none": 0.010198664785988206
					},
					"glue": {
						"acc,none": 0.801542851606087,
						"acc_stderr,none": 0.09924730396109512,
						"alias": "glue",
						"f1,none": 0.85712412474566,
						"f1_stderr,none": 7.441467137764442e-06,
						"mcc,none": 0.03589254563226399,
						"mcc_stderr,none": 0.0001454315140425739
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.001516300227445034,
						"exact_match_stderr,get-answer": 0.0010717793485492673
					},
					"hellaswag": {
						"acc,none": 0.4749053973312089,
						"acc_norm,none": 0.6408086038637721,
						"acc_norm_stderr,none": 0.004787829168255659,
						"acc_stderr,none": 0.004983492928102842,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.277331793242853,
						"acc_norm,none": 0.277331793242853,
						"acc_norm_stderr,none": 0.02768556763224144,
						"acc_stderr,none": 0.02768556763224144,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717593,
						"acc_stderr,none": 0.014095022868717593,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575023,
						"acc_stderr,none": 0.014442734941575023,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377949,
						"acc_stderr,none": 0.014370995982377949,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168546,
						"acc_stderr,none": 0.013772206565168546,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25166666666666665,
						"acc_norm,none": 0.25166666666666665,
						"acc_norm_stderr,none": 0.01773156149490717,
						"acc_stderr,none": 0.01773156149490717,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877667,
						"acc_stderr,none": 0.013663187134877667,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.333,
						"acc_norm,none": 0.333,
						"acc_norm_stderr,none": 0.014910846164229857,
						"acc_stderr,none": 0.014910846164229857,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.028085923439997294,
						"acc_stderr,none": 0.028085923439997294,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729486,
						"acc_stderr,none": 0.014013292702729486,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.03939825345266469,
						"acc_stderr,none": 0.03939825345266469,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909282,
						"acc_stderr,none": 0.04292346959909282,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259585,
						"acc_stderr,none": 0.014111099288259585,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.313,
						"acc_norm,none": 0.313,
						"acc_norm_stderr,none": 0.014671272822977886,
						"acc_stderr,none": 0.014671272822977886,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809963,
						"acc_stderr,none": 0.014312087053809963,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462621,
						"acc_stderr,none": 0.014078856992462621,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612028,
						"acc_stderr,none": 0.014190150117612028,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.04560480215720683,
						"acc_stderr,none": 0.04560480215720683,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729493,
						"acc_stderr,none": 0.014013292702729493,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.344,
						"acc_norm,none": 0.344,
						"acc_norm_stderr,none": 0.015029633724408947,
						"acc_stderr,none": 0.015029633724408947,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314137,
						"acc_stderr,none": 0.013644675781314137,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661778,
						"acc_stderr,none": 0.013106173040661778,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234187,
						"acc_stderr,none": 0.013807775152234187,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.01827823460122089,
						"acc_stderr,none": 0.01827823460122089,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.014428554438445512,
						"acc_stderr,none": 0.014428554438445512,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729474,
						"acc_stderr,none": 0.014013292702729474,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517908,
						"acc_stderr,none": 0.014297146862517908,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633908,
						"acc_stderr,none": 0.014046255632633908,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.038612291966536955,
						"acc_stderr,none": 0.038612291966536955,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.02469885513168685,
						"acc_stderr,none": 0.02469885513168685,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.01407885699246262,
						"acc_stderr,none": 0.01407885699246262,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296186,
						"acc_stderr,none": 0.014341711358296186,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259583,
						"acc_stderr,none": 0.014111099288259583,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.03089738243291862,
						"acc_stderr,none": 0.03089738243291862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717614,
						"acc_stderr,none": 0.014095022868717614,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377937,
						"acc_stderr,none": 0.014370995982377937,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.0326374172542057,
						"acc_stderr,none": 0.0326374172542057,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.342,
						"acc_norm,none": 0.342,
						"acc_norm_stderr,none": 0.015008706182121728,
						"acc_stderr,none": 0.015008706182121728,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.03957258033866218,
						"alias": "kobest",
						"f1,none": 0.40584567134752647,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5235042735042735,
						"acc_stderr,none": 0.013334010264781466,
						"alias": " - kobest_boolq",
						"f1,none": 0.4411383386039704,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.499,
						"acc_stderr,none": 0.015819268290576817,
						"alias": " - kobest_copa",
						"f1,none": 0.49815640090992963,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.348,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.022279694107843417,
						"acc_stderr,none": 0.021323728632807498,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.34679109587202345,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4659949622166247,
						"acc_stderr,none": 0.025067769630661905,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3699544855671338,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5673394139336309,
						"acc_stderr,none": 0.0078019799241583355,
						"alias": "lambada",
						"perplexity,none": 6.569091645893632,
						"perplexity_stderr,none": 0.18527322557911466
					},
					"lambada_cloze": {
						"acc,none": 0.6734911701921211,
						"acc_stderr,none": 0.0066274245138747,
						"alias": "lambada_cloze",
						"perplexity,none": 5.771733035039074,
						"perplexity_stderr,none": 0.18955966680759215
					},
					"lambada_multilingual": {
						"acc,none": 0.3825344459538133,
						"acc_stderr,none": 0.07782847016255834,
						"alias": "lambada_multilingual",
						"perplexity,none": 159.92860998449697,
						"perplexity_stderr,none": 126.21127109702662
					},
					"lambada_openai": {
						"acc,none": 0.5594799146128469,
						"acc_stderr,none": 0.0069165127228167575,
						"alias": " - lambada_openai",
						"perplexity,none": 6.619105438644448,
						"perplexity_stderr,none": 0.1897348843226533
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.6757228798758005,
						"acc_stderr,none": 0.006521605716950172,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 5.955842579743194,
						"perplexity_stderr,none": 0.18182812898005146
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.22181253638657092,
						"acc_stderr,none": 0.005788249352644483,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 488.05880178905716,
						"perplexity_stderr,none": 34.474740143266686
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5594799146128469,
						"acc_stderr,none": 0.006916512722816758,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 6.623817622283293,
						"perplexity_stderr,none": 0.18990514119566282
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.40481273044828253,
						"acc_stderr,none": 0.006838580607651544,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 42.10936123532726,
						"perplexity_stderr,none": 2.2480636493439574
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4364447894430429,
						"acc_stderr,none": 0.006909473636524467,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 31.641857449105515,
						"perplexity_stderr,none": 1.7198163372686233
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.2901222588783233,
						"acc_stderr,none": 0.006322580641394924,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 231.2092118267118,
						"perplexity_stderr,none": 16.243658384394983
					},
					"lambada_standard": {
						"acc,none": 0.574616728119542,
						"acc_stderr,none": 0.0068879725701178895,
						"alias": " - lambada_standard",
						"perplexity,none": 6.515893892181456,
						"perplexity_stderr,none": 0.17662192004412752
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.6712594605084417,
						"acc_stderr,none": 0.0065446121513527585,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 5.587623490334955,
						"perplexity_stderr,none": 0.14785056227794588
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3479643765903308,
						"exact_match_stderr,get-answer": 0.012017522990418038
					},
					"logiqa": {
						"acc,none": 0.22887864823348694,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.017719247798458286,
						"acc_stderr,none": 0.01647810727631328,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25699745547073793,
						"acc_norm,none": 0.29834605597964375,
						"acc_norm_stderr,none": 0.0115433946397798,
						"acc_stderr,none": 0.011024819039416614,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2777219430485762,
						"acc_norm,none": 0.27705192629815745,
						"acc_norm_stderr,none": 0.008192844840426448,
						"acc_stderr,none": 0.008198943594859159,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6682906163948316,
						"acc_stderr,none": 0.0048456557957000145,
						"alias": "mc_taco",
						"f1,none": 0.05205811138014528,
						"f1_stderr,none": 0.00542512525822136
					},
					"medmcqa": {
						"acc,none": 0.3700693282333254,
						"acc_norm,none": 0.3700693282333254,
						"acc_norm_stderr,none": 0.007466136215544276,
						"acc_stderr,none": 0.007466136215544276,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.373134328358209,
						"acc_norm,none": 0.373134328358209,
						"acc_norm_stderr,none": 0.013560518364022962,
						"acc_stderr,none": 0.013560518364022962,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.43996581683520863,
						"acc_stderr,none": 0.09985373223622389,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4222222222222222,
						"acc_stderr,none": 0.04266763404099582,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3881578947368421,
						"acc_stderr,none": 0.03965842097512744,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4830188679245283,
						"acc_stderr,none": 0.030755120364119905,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4583333333333333,
						"acc_stderr,none": 0.04166666666666665,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.4161849710982659,
						"acc_stderr,none": 0.03758517775404947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.04835503696107223,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.04960449637488583,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3702127659574468,
						"acc_stderr,none": 0.03156564682236785,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.042270544512321984,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4068965517241379,
						"acc_stderr,none": 0.04093793981266237,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.023636975996101803,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.38095238095238093,
						"acc_stderr,none": 0.04343525428949098,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.532258064516129,
						"acc_stderr,none": 0.028384747788813336,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3497536945812808,
						"acc_stderr,none": 0.03355400904969565,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956911,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.6242424242424243,
						"acc_stderr,none": 0.03781887353205983,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.5505050505050505,
						"acc_stderr,none": 0.035441324919479704,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5647668393782384,
						"acc_stderr,none": 0.03578038165008585,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4461538461538462,
						"acc_stderr,none": 0.02520357177302833,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3111111111111111,
						"acc_stderr,none": 0.028226446749683505,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.4789915966386555,
						"acc_stderr,none": 0.032449808499900284,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31788079470198677,
						"acc_stderr,none": 0.038020397601079024,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.6422018348623854,
						"acc_stderr,none": 0.020552060784827825,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.38425925925925924,
						"acc_stderr,none": 0.03317354514310742,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.03509312031717982,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.030685820596610812,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.5067264573991032,
						"acc_stderr,none": 0.03355476596234354,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3969465648854962,
						"acc_stderr,none": 0.04291135671009224,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.393836344314559,
						"acc_stderr,none": 0.11068831355914426,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.5537190082644629,
						"acc_stderr,none": 0.0453793517794788,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5462962962962963,
						"acc_stderr,none": 0.04812917324536823,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.50920245398773,
						"acc_stderr,none": 0.03927705600787443,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.36607142857142855,
						"acc_stderr,none": 0.0457237235873743,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6407766990291263,
						"acc_stderr,none": 0.047504583990416946,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7094017094017094,
						"acc_stderr,none": 0.02974504857267406,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5951468710089399,
						"acc_stderr,none": 0.017553246467720256,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.47398843930635837,
						"acc_stderr,none": 0.026882643434022895,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.028491993586171563,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.5043450273575797,
						"acc_stderr,none": 0.08747228668961055,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.45980707395498394,
						"acc_stderr,none": 0.028306190403305696,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.45987654320987653,
						"acc_stderr,none": 0.02773102275353928,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.36879432624113473,
						"acc_stderr,none": 0.028782227561347243,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.31681877444589307,
						"acc_stderr,none": 0.011882349954723008,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.43014705882352944,
						"acc_stderr,none": 0.030074971917302875,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.41830065359477125,
						"acc_stderr,none": 0.019955975145835546,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.5272727272727272,
						"acc_stderr,none": 0.04782001791380062,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4489795918367347,
						"acc_stderr,none": 0.03184213866687579,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49951251218719533,
						"acc_stderr,none": 0.08013036553983305,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6119402985074627,
						"acc_stderr,none": 0.03445789964362749,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3872502378686965,
						"acc_stderr,none": 0.07785493894311979,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.050251890762960605,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120574,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.45614035087719296,
						"acc_stderr,none": 0.03820042586602966,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.6231278655119715,
						"acc_stderr,none": 0.0048917310724433645,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.6223555736371034,
						"acc_stderr,none": 0.004889472223330181,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7965686274509803,
						"acc_stderr,none": 0.01995369635088925,
						"alias": "mrpc",
						"f1,none": 0.8654781199351702,
						"f1_stderr,none": 0.01464197325476066
					},
					"multimedqa": {
						"acc,none": 0.4063875088715401,
						"acc_norm,none": 0.37056429084523906,
						"acc_norm_stderr,none": 0.00010241543372288454,
						"acc_stderr,none": 0.07686893603849891,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.09653465346534654,
						"acc_stderr,none": 0.004241904245616562,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6898984215800434,
						"mrr_stderr,none": 0.010372541196019847,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4198645598194131,
						"r@2_stderr,none": 0.016590049131936787
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6476674208051734,
						"mrr_stderr,none": 0.01043460233713634,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4796839729119639,
						"r@2_stderr,none": 0.016793436269683173
					},
					"openbookqa": {
						"acc,none": 0.328,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374087,
						"acc_stderr,none": 0.021017027165175495,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.011102325468811016,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3705,
						"acc_stderr,none": 0.010801537464907347,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4005,
						"acc_stderr,none": 0.01095946759496034,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.465,
						"acc_stderr,none": 0.011155703691943112,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4735,
						"acc_stderr,none": 0.011167418260963933,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.011031720148042086,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.44164285714285717,
						"acc_stderr,none": 0.03360326465165433,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7633297062023939,
						"acc_norm,none": 0.7758433079434167,
						"acc_norm_stderr,none": 0.009729897956410048,
						"acc_stderr,none": 0.009916841655042809,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.34148163962425276,
						"acc_norm,none": 0.35172929120409907,
						"acc_norm_stderr,none": 0.003488635688231787,
						"acc_stderr,none": 0.003464501664986716,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.678,
						"acc_stderr,none": 0.020916668330019882,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7457031171656568,
						"acc_norm,none": 0.6322163804783708,
						"acc_norm_stderr,none": 0.0043889885293719125,
						"acc_stderr,none": 0.15599252416803436,
						"alias": "pythia",
						"bits_per_byte,none": 0.8013693061718576,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74275444362767,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 6.619105438644448,
						"perplexity_stderr,none": 0.1897348843226533,
						"word_perplexity,none": 19.49838607196728,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4379432624113475,
						"acc_norm,none": 0.4787234042553192,
						"acc_norm_stderr,none": 0.04936844264079171,
						"acc_stderr,none": 0.04260013524670006,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5166666666666667,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.045809453927047654,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.41875,
						"acc_norm,none": 0.50625,
						"acc_norm_stderr,none": 0.03964948130713095,
						"acc_stderr,none": 0.03912553875691512,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4154929577464789,
						"acc_norm,none": 0.41901408450704225,
						"acc_norm_stderr,none": 0.029329448381681836,
						"acc_stderr,none": 0.02929432462367856,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.7616694124107634,
						"acc_stderr,none": 0.005764973318052531,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.8886965124907247,
						"acc_stderr,none": 0.001564172421808576,
						"alias": "qqp",
						"f1,none": 0.8568064659835805,
						"f1_stderr,none": 0.002113719432793249
					},
					"race": {
						"acc,none": 0.44593301435406696,
						"acc_stderr,none": 0.01538387351211463,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.8014440433212996,
						"acc_stderr,none": 0.024011733902867625,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.966,
						"acc_norm,none": 0.96,
						"acc_norm_stderr,none": 0.006199874066337032,
						"acc_stderr,none": 0.005733836139695461,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.8014440433212996,
						"acc_stderr,none": 0.024011733902867625,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.944954128440367,
						"acc_stderr,none": 0.007727849615706705,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5084474657602719,
						"acc_norm,none": 0.6911426572028392,
						"acc_norm_stderr,none": 0.003266581789994963,
						"acc_stderr,none": 0.0035345874849746425,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7910219293867092,
						"acc_stderr,none": 0.07311353664556729,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.835136217948718,
						"acc_stderr,none": 0.0037137336511761853,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9404074186682883,
						"acc_stderr,none": 0.002383326635738082,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.6033333333333334,
						"acc_stderr,none": 0.004844095733879216,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3933979946240904,
						"acc_stderr,none": 0.062167434079411894,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.7037943696450428,
						"bleu_acc_stderr,none": 0.00025547531237864933,
						"bleu_diff,none": 17.725051646302095,
						"bleu_diff_stderr,none": 0.4690767892279991,
						"bleu_max,none": 21.05783221282457,
						"bleu_max_stderr,none": 0.5018198156702414,
						"rouge1_acc,none": 0.7441860465116279,
						"rouge1_acc_stderr,none": 0.00023330045917772135,
						"rouge1_diff,none": 40.374563354976026,
						"rouge1_diff_stderr,none": 2.0957641410966774,
						"rouge1_max,none": 49.87215207168784,
						"rouge1_max_stderr,none": 1.5554072752397459,
						"rouge2_acc,none": 0.4700122399020808,
						"rouge2_acc_stderr,none": 0.0003052705076523413,
						"rouge2_diff,none": 36.50545880041624,
						"rouge2_diff_stderr,none": 2.2860834352706494,
						"rouge2_max,none": 38.4969887926062,
						"rouge2_max_stderr,none": 2.0849267673981364,
						"rougeL_acc,none": 0.7405140758873929,
						"rougeL_acc_stderr,none": 0.00023548159227945327,
						"rougeL_diff,none": 40.26038709184385,
						"rougeL_diff_stderr,none": 2.102396565924826,
						"rougeL_max,none": 49.32430704016434,
						"rougeL_max_stderr,none": 1.5785700559414388
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.7037943696450428,
						"bleu_acc_stderr,none": 0.0159835951018114,
						"bleu_diff,none": 17.725051646302095,
						"bleu_diff_stderr,none": 0.6848918084106417,
						"bleu_max,none": 21.05783221282457,
						"bleu_max_stderr,none": 0.7083924164403804,
						"rouge1_acc,none": 0.7441860465116279,
						"rouge1_acc_stderr,none": 0.015274176219283361,
						"rouge1_diff,none": 40.374563354976026,
						"rouge1_diff_stderr,none": 1.4476754267088592,
						"rouge1_max,none": 49.87215207168784,
						"rouge1_max_stderr,none": 1.247159683135943,
						"rouge2_acc,none": 0.4700122399020808,
						"rouge2_acc_stderr,none": 0.017471992091697537,
						"rouge2_diff,none": 36.50545880041624,
						"rouge2_diff_stderr,none": 1.511979971848387,
						"rouge2_max,none": 38.4969887926062,
						"rouge2_max_stderr,none": 1.4439275492205752,
						"rougeL_acc,none": 0.7405140758873929,
						"rougeL_acc_stderr,none": 0.015345409485557994,
						"rougeL_diff,none": 40.26038709184385,
						"rougeL_diff_stderr,none": 1.44996433263885,
						"rougeL_max,none": 49.32430704016434,
						"rougeL_max_stderr,none": 1.25641157903827
					},
					"truthfulqa_mc1": {
						"acc,none": 0.26560587515299877,
						"acc_stderr,none": 0.0154610276272536,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.45729405435963627,
						"acc_stderr,none": 0.015230808670616297,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.3031496062992126,
						"exact_match_stderr,none": 0.010198664785988206
					},
					"wic": {
						"acc,none": 0.7100313479623824,
						"acc_stderr,none": 0.017978132228329904,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.8013693061718576,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.74275444362767,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 19.49838607196728,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6574585635359116,
						"acc_stderr,none": 0.013337483579075925,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5915492957746479,
						"acc_stderr,none": 0.05875113694257524,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5865384615384616,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7802197802197802,
						"acc_stderr,none": 0.025108358900325773,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.038320767835798686,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.022382357781962143,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.02232498173838525,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.02233718647904429,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.02234794983266809,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740862,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.021728881438701716,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.37627844712182057,
						"acc_stderr,none": 0.03592781598574345,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3586345381526104,
						"acc_stderr,none": 0.009613164900909873,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3477911646586345,
						"acc_stderr,none": 0.00954641176984314,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.43293172690763054,
						"acc_stderr,none": 0.009931501976863056,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335275,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.4497991967871486,
						"acc_stderr,none": 0.009971431255560168,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.36305220883534134,
						"acc_stderr,none": 0.009638823133984984,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4742971887550201,
						"acc_stderr,none": 0.010008822253312044,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.38353413654618473,
						"acc_stderr,none": 0.009746396613443772,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3751004016064257,
						"acc_stderr,none": 0.009704349720814057,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.00946022348499647,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3477911646586345,
						"acc_stderr,none": 0.00954641176984314,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.00948525020851688,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721321,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.37670682730923694,
						"acc_stderr,none": 0.009712599529552992,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.00943457405610197,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.574153179712412,
						"acc_stderr,none": 0.06115222376695438,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5751158173395102,
						"acc_stderr,none": 0.012721094073523329,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7326273990734613,
						"acc_stderr,none": 0.011389680853034757,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6532097948378557,
						"acc_stderr,none": 0.012248172150852325,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5082726671078756,
						"acc_stderr,none": 0.012865364020375395,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5618795499669094,
						"acc_stderr,none": 0.012768206616277762,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5565850430178689,
						"acc_stderr,none": 0.012784462136657198,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48908007941760423,
						"acc_stderr,none": 0.012864056278255048,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5228325612177366,
						"acc_stderr,none": 0.012853702384870849,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5274652547981469,
						"acc_stderr,none": 0.012847698270388227,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.57180675049636,
						"acc_stderr,none": 0.01273374279951516,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6168100595632032,
						"acc_stderr,none": 0.012511065565305199,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7271296920656327,
						"acc_stderr,none": 0.07765926605467739,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8361290322580646,
						"acc_stderr,none": 0.007678379958837628,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.05221260262032129,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5651720542231491,
						"acc_stderr,none": 0.016016451724190764,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6539923954372624,
						"acc_stderr,none": 0.029388574800545037,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.526984126984127,
						"acc_stderr,none": 0.028175510942128692,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7063492063492064,
						"acc_stderr,none": 0.020306792341159757,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "bigscience/bloomz-7b1-mt"
	},
	"facebook/opt-1.3b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.459695603156708,
						"acc_norm,none": 0.44193912063134166,
						"acc_norm_stderr,none": 0.03551628247230571,
						"acc_stderr,none": 0.054508449452125976,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.014626883137797381,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00665,
						"acc_stderr,none": 0.007156097138676083,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8471492537313433,
						"acc_stderr,none": 0.13906108016441865,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.22956909361069835,
						"acc_norm,none": 0.22956909361069835,
						"acc_norm_stderr,none": 0.10768579644244232,
						"acc_stderr,none": 0.10768579644244232,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2527197375237437,
						"acc_norm,none": 0.2527197375237437,
						"acc_norm_stderr,none": 0.03500014762774971,
						"acc_stderr,none": 0.03500014762774971,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.52539877757901,
						"likelihood_diff_stderr,none": 0.5057507734307919,
						"pct_stereotype,none": 0.5403995229576624,
						"pct_stereotype_stderr,none": 0.09917260811641838
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"glue": {
						"acc,none": 0.477390729286622,
						"acc_stderr,none": 0.0749640103494011,
						"alias": "glue",
						"f1,none": 0.25270586492466257,
						"f1_stderr,none": 0.002585589874293703,
						"mcc,none": -0.08120492065575208,
						"mcc_stderr,none": 0.000827846082807807
					},
					"kmmlu": {
						"acc,none": 0.09734334392145538,
						"acc_norm,none": 0.09734334392145538,
						"acc_norm_stderr,none": 0.06507895490528554,
						"acc_stderr,none": 0.06507895490528554,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.46963385222538917,
						"acc_norm,none": 0.402,
						"acc_norm_stderr,none": 0.00048175551102203995,
						"acc_stderr,none": 0.04713293481394482,
						"alias": "kobest",
						"f1,none": 0.36190417759808324,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5393945274597322,
						"acc_stderr,none": 0.014950657730299681,
						"alias": "lambada",
						"perplexity,none": 8.629010456854761,
						"perplexity_stderr,none": 0.8280338341713326
					},
					"lambada_cloze": {
						"acc,none": 0.03580438579468271,
						"acc_stderr,none": 0.003264060801138701,
						"alias": "lambada_cloze",
						"perplexity,none": 599.2178309769854,
						"perplexity_stderr,none": 42.71421559379285
					},
					"lambada_multilingual": {
						"acc,none": 0.2986997865321172,
						"acc_stderr,none": 0.07790139997300333,
						"alias": "lambada_multilingual",
						"perplexity,none": 261.46166415418185,
						"perplexity_stderr,none": 86.99322977221938
					},
					"mmlu": {
						"acc,none": 0.25067654180316196,
						"acc_stderr,none": 0.03931635941693457,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25844845908607866,
						"acc_stderr,none": 0.033664534052158475,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2471837785645317,
						"acc_stderr,none": 0.04120015887058747,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23041923951901203,
						"acc_stderr,none": 0.03702372565904026,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2622898826514431,
						"acc_stderr,none": 0.04370383754003168,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2916962384669979,
						"acc_norm,none": 0.2747816944388267,
						"acc_norm_stderr,none": 0.00010506467337814115,
						"acc_stderr,none": 0.07899167094603525,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4997857142857143,
						"acc_stderr,none": 0.03871039585701613,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7178812736221523,
						"acc_norm,none": 0.44996122278655265,
						"acc_norm_stderr,none": 0.00400119153608266,
						"acc_stderr,none": 0.1468283150440037,
						"alias": "pythia",
						"bits_per_byte,none": 0.7560726480145469,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6888868178965153,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 7.028865458851284,
						"perplexity_stderr,none": 0.1842057018833793,
						"word_perplexity,none": 16.484762135338798,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32978723404255317,
						"acc_norm,none": 0.40602836879432624,
						"acc_norm_stderr,none": 0.044816032059822264,
						"acc_stderr,none": 0.0414474796001638,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.4713653455791821,
						"acc_stderr,none": 0.038676102976220705,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3372436224631588,
						"acc_stderr,none": 0.048121845997234285,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.00027510551029423286,
						"bleu_diff,none": -5.672266529054544,
						"bleu_diff_stderr,none": 0.43096721928515636,
						"bleu_max,none": 19.007171908637446,
						"bleu_max_stderr,none": 0.4378366396008561,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.00024251335977072583,
						"rouge1_diff,none": -9.611533086730972,
						"rouge1_diff_stderr,none": 0.7251258672009934,
						"rouge1_max,none": 40.12410355796137,
						"rouge1_max_stderr,none": 0.8195632275289366,
						"rouge2_acc,none": 0.1909424724602203,
						"rouge2_acc_stderr,none": 0.00018931794690073162,
						"rouge2_diff,none": -10.894202572385344,
						"rouge2_diff_stderr,none": 0.8343287759058793,
						"rouge2_max,none": 23.76900565755557,
						"rouge2_max_stderr,none": 0.8550033961054041,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.0002340311754862133,
						"rougeL_diff,none": -9.511270603625562,
						"rougeL_diff_stderr,none": 0.6961023478754015,
						"rougeL_max,none": 37.6917411453721,
						"rougeL_max_stderr,none": 0.796072313119905
					},
					"xcopa": {
						"acc,none": 0.5218181818181817,
						"acc_stderr,none": 0.029112404859548864,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.36238286479250337,
						"acc_stderr,none": 0.04736041571640584,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5127850309848986,
						"acc_stderr,none": 0.05850374929654006,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6709372892784895,
						"acc_stderr,none": 0.0789358842173095,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.459695603156708,
						"acc_norm,none": 0.44193912063134166,
						"acc_norm_stderr,none": 0.03551628247230571,
						"acc_stderr,none": 0.054508449452125976,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3353125,
						"acc_stderr,none": 0.014626883137797381,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.337,
						"acc_stderr,none": 0.014955087918653614,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.014976758771620345,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3308333333333333,
						"acc_stderr,none": 0.013588208070709002,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2295221843003413,
						"acc_norm,none": 0.29692832764505117,
						"acc_norm_stderr,none": 0.013352025976725225,
						"acc_stderr,none": 0.012288926760890792,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.5732323232323232,
						"acc_norm,none": 0.5134680134680135,
						"acc_norm_stderr,none": 0.01025606085484075,
						"acc_stderr,none": 0.010149141043955635,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00665,
						"acc_stderr,none": 0.007156097138676083,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0235,
						"acc_stderr,none": 0.003388158025742493,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.013,
						"acc_stderr,none": 0.002533517190523329,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0235,
						"acc_stderr,none": 0.00338815802574248,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0045,
						"acc_stderr,none": 0.0014969954902233234,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521528,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000037,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0039045553145336228,
						"acc_stderr,none": 0.0012992568927018468,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8471492537313433,
						"acc_stderr,none": 0.13906108016441865,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942323,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448847,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329836,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617331,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096928,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411247,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177904,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469334,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036216,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792947,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.0068954729748979095,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.0071508835212954315,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139969,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611487,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920826,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.822,
						"acc_stderr,none": 0.012102167676183597,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.00969892102602495,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491104,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890129,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306461,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745906,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.01284137457209692,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.723,
						"acc_stderr,none": 0.014158794845306265,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341674,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557422,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240648,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.0102813280127474,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920675,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524315,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481756,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905687,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259585,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998097,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.01399667485179627,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752506,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074794,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074116,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897892,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329825,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.776,
						"acc_stderr,none": 0.013190830072364483,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.381,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081365,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400248,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426496,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816334,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.015803979428161946,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.01084535023047299,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.0068297617561409165,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938596,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.0115694793682713,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.01028132801274739,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160333,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936713,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734941,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796393,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.00420638724961147,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.004536472151306523,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.015794475789511472,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.015577986829936531,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5669724770642202,
						"acc_stderr,none": 0.00866625130551805,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.39285714285714285,
						"acc_stderr,none": 0.0658538889806635,
						"alias": "cb",
						"f1,none": 0.20571590265987552,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.22956909361069835,
						"acc_norm,none": 0.22956909361069835,
						"acc_norm_stderr,none": 0.10768579644244232,
						"acc_stderr,none": 0.10768579644244232,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2527197375237437,
						"acc_norm,none": 0.2527197375237437,
						"acc_norm_stderr,none": 0.03500014762774971,
						"acc_stderr,none": 0.03500014762774971,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.029975990636702532,
						"acc_stderr,none": 0.029975990636702532,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3113207547169811,
						"acc_norm,none": 0.3113207547169811,
						"acc_norm_stderr,none": 0.0451874553177075,
						"acc_stderr,none": 0.0451874553177075,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932032,
						"acc_stderr,none": 0.030532892233932032,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998164,
						"acc_stderr,none": 0.028942004040998164,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.036079930330813775,
						"acc_stderr,none": 0.036079930330813775,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.08120492065575208,
						"mcc_stderr,none": 0.028772314519478738
					},
					"copa": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.03861229196653697,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.52539877757901,
						"likelihood_diff_stderr,none": 0.5057507734307919,
						"pct_stereotype,none": 0.5403995229576624,
						"pct_stereotype_stderr,none": 0.09917260811641838
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.3606887298747763,
						"likelihood_diff_stderr,none": 0.0824449288360497,
						"pct_stereotype,none": 0.631484794275492,
						"pct_stereotype_stderr,none": 0.011783441439938284
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.5673076923076925,
						"likelihood_diff_stderr,none": 0.36138442344982585,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.051282051282051246
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.6477272727272725,
						"likelihood_diff_stderr,none": 2.0851761821831265,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.855769230769231,
						"likelihood_diff_stderr,none": 0.5429787770188624,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.44140625,
						"likelihood_diff_stderr,none": 0.15364087632914586,
						"pct_stereotype,none": 0.653125,
						"pct_stereotype_stderr,none": 0.026649515182883866
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3072916666666665,
						"likelihood_diff_stderr,none": 0.22122395565990718,
						"pct_stereotype,none": 0.5833333333333334,
						"pct_stereotype_stderr,none": 0.03362277436608043
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.71875,
						"likelihood_diff_stderr,none": 0.33018938035239126,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.1518208661417324,
						"likelihood_diff_stderr,none": 0.1458781042828142,
						"pct_stereotype,none": 0.5413385826771654,
						"pct_stereotype_stderr,none": 0.022129755490549064
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.4887387387387387,
						"likelihood_diff_stderr,none": 0.31001385974980483,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.04090743073860918
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.452956989247312,
						"likelihood_diff_stderr,none": 0.4573624139853457,
						"pct_stereotype,none": 0.8279569892473119,
						"pct_stereotype_stderr,none": 0.03934852812061863
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.6710526315789473,
						"likelihood_diff_stderr,none": 0.21825398824013698,
						"pct_stereotype,none": 0.6368421052631579,
						"pct_stereotype_stderr,none": 0.03498104083833203
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.6903324388789507,
						"likelihood_diff_stderr,none": 0.08992201815107946,
						"pct_stereotype,none": 0.4502087060226595,
						"pct_stereotype_stderr,none": 0.012152590574174896
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.486111111111111,
						"likelihood_diff_stderr,none": 0.32377111158230604,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.644230769230769,
						"likelihood_diff_stderr,none": 0.8854511102477953,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.503787878787879,
						"likelihood_diff_stderr,none": 0.5031223147937559,
						"pct_stereotype,none": 0.5909090909090909,
						"pct_stereotype_stderr,none": 0.060983672113630656
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8769470404984423,
						"likelihood_diff_stderr,none": 0.1545610244574755,
						"pct_stereotype,none": 0.4735202492211838,
						"pct_stereotype_stderr,none": 0.027911625198936637
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.396245059288537,
						"likelihood_diff_stderr,none": 0.2381834371254848,
						"pct_stereotype,none": 0.2924901185770751,
						"pct_stereotype_stderr,none": 0.028656396908494274
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.760416666666667,
						"likelihood_diff_stderr,none": 0.5616884142534654,
						"pct_stereotype,none": 0.4861111111111111,
						"pct_stereotype_stderr,none": 0.05931618532716555
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.452717391304348,
						"likelihood_diff_stderr,none": 0.17702647579615602,
						"pct_stereotype,none": 0.31521739130434784,
						"pct_stereotype_stderr,none": 0.021685782795019003
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.6619565217391306,
						"likelihood_diff_stderr,none": 0.38888903595627766,
						"pct_stereotype,none": 0.6608695652173913,
						"pct_stereotype_stderr,none": 0.04433930011819815
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.5,
						"likelihood_diff_stderr,none": 0.3098652667152024,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355003
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.955676020408163,
						"likelihood_diff_stderr,none": 0.28049010363745075,
						"pct_stereotype,none": 0.5969387755102041,
						"pct_stereotype_stderr,none": 0.035126356077670465
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"glue": {
						"acc,none": 0.477390729286622,
						"acc_stderr,none": 0.0749640103494011,
						"alias": "glue",
						"f1,none": 0.25270586492466257,
						"f1_stderr,none": 0.002585589874293703,
						"mcc,none": -0.08120492065575208,
						"mcc_stderr,none": 0.000827846082807807
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.012130401819560273,
						"exact_match_stderr,get-answer": 0.003015294242890933
					},
					"hellaswag": {
						"acc,none": 0.4138617805218084,
						"acc_norm,none": 0.5371439952200757,
						"acc_norm_stderr,none": 0.004975993795562032,
						"acc_stderr,none": 0.004915177406956261,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09734334392145538,
						"acc_norm,none": 0.09734334392145538,
						"acc_norm_stderr,none": 0.06507895490528554,
						"acc_stderr,none": 0.06507895490528554,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.009008893392651526,
						"acc_stderr,none": 0.009008893392651526,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333378,
						"acc_stderr,none": 0.008333333333333378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264368,
						"acc_stderr,none": 0.012510816141264368,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319325,
						"acc_stderr,none": 0.012535235623319325,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.015347940104209503,
						"acc_stderr,none": 0.015347940104209503,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.008,
						"acc_norm,none": 0.008,
						"acc_norm_stderr,none": 0.0028185003005045044,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.002,
						"acc_norm,none": 0.002,
						"acc_norm_stderr,none": 0.0014135055705578176,
						"acc_stderr,none": 0.0014135055705578176,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910625,
						"acc_stderr,none": 0.004319451082910625,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452374,
						"acc_stderr,none": 0.005651808820452374,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910613,
						"acc_stderr,none": 0.004319451082910613,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.015,
						"acc_norm,none": 0.015,
						"acc_norm_stderr,none": 0.003845749574502999,
						"acc_stderr,none": 0.003845749574502999,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.025,
						"acc_norm,none": 0.025,
						"acc_norm_stderr,none": 0.004939574819698462,
						"acc_stderr,none": 0.004939574819698462,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.13,
						"acc_norm,none": 0.13,
						"acc_norm_stderr,none": 0.010640169792499356,
						"acc_stderr,none": 0.010640169792499356,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.009899393819724432,
						"acc_stderr,none": 0.009899393819724432,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.081,
						"acc_norm,none": 0.081,
						"acc_norm_stderr,none": 0.008632121032139966,
						"acc_stderr,none": 0.008632121032139966,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323485,
						"acc_stderr,none": 0.008072494358323485,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.0042063872496114615,
						"acc_stderr,none": 0.0042063872496114615,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.029,
						"acc_norm,none": 0.029,
						"acc_norm_stderr,none": 0.005309160685756985,
						"acc_stderr,none": 0.005309160685756985,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.008072494358323494,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412808,
						"acc_stderr,none": 0.012310790208412808,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14166666666666666,
						"acc_norm,none": 0.14166666666666666,
						"acc_norm_stderr,none": 0.014247819867919655,
						"acc_stderr,none": 0.014247819867919655,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024952,
						"acc_stderr,none": 0.009698921026024952,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706827,
						"acc_stderr,none": 0.007335175853706827,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753651,
						"acc_stderr,none": 0.008583336977753651,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660013,
						"acc_stderr,none": 0.013394902889660013,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426111,
						"acc_stderr,none": 0.006125072776426111,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416054,
						"acc_stderr,none": 0.010811655372416054,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178359,
						"acc_stderr,none": 0.004429403980178359,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.46963385222538917,
						"acc_norm,none": 0.402,
						"acc_norm_stderr,none": 0.00048175551102203995,
						"acc_stderr,none": 0.04713293481394482,
						"alias": "kobest",
						"f1,none": 0.36190417759808324,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.477,
						"acc_stderr,none": 0.0158025542467261,
						"alias": " - kobest_copa",
						"f1,none": 0.4764969135339061,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.302,
						"acc_norm,none": 0.402,
						"acc_norm_stderr,none": 0.021948929609938612,
						"acc_stderr,none": 0.020553269174209195,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3001327480305582,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.48866498740554154,
						"acc_stderr,none": 0.025119488062637793,
						"alias": " - kobest_sentineg",
						"f1,none": 0.35634818583328676,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5393945274597322,
						"acc_stderr,none": 0.014950657730299681,
						"alias": "lambada",
						"perplexity,none": 8.629010456854761,
						"perplexity_stderr,none": 0.8280338341713326
					},
					"lambada_cloze": {
						"acc,none": 0.03580438579468271,
						"acc_stderr,none": 0.003264060801138701,
						"alias": "lambada_cloze",
						"perplexity,none": 599.2178309769854,
						"perplexity_stderr,none": 42.71421559379285
					},
					"lambada_multilingual": {
						"acc,none": 0.2986997865321172,
						"acc_stderr,none": 0.07790139997300333,
						"alias": "lambada_multilingual",
						"perplexity,none": 261.46166415418185,
						"perplexity_stderr,none": 86.99322977221938
					},
					"lambada_openai": {
						"acc,none": 0.5604502231709684,
						"acc_stderr,none": 0.006914879684264953,
						"alias": " - lambada_openai",
						"perplexity,none": 7.028865458851284,
						"perplexity_stderr,none": 0.1842057018833793
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.03182612070638463,
						"acc_stderr,none": 0.0024455728613517022,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 524.9380944657458,
						"perplexity_stderr,none": 18.138383384942742
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.20376479720551136,
						"acc_stderr,none": 0.005611737377556239,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 360.9006634582957,
						"perplexity_stderr,none": 23.020118610382262
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5625849019988356,
						"acc_stderr,none": 0.006911192566731786,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 7.036980672560245,
						"perplexity_stderr,none": 0.1847748407024798
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.23267999223753152,
						"acc_stderr,none": 0.0058868077695984865,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 344.0726496404489,
						"perplexity_stderr,none": 21.08620746927672
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.2645061129439162,
						"acc_stderr,none": 0.006144965702579054,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 208.52231244163315,
						"perplexity_stderr,none": 12.630450879529452
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.22996312827479137,
						"acc_stderr,none": 0.00586269008864363,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 386.77571455797136,
						"perplexity_stderr,none": 25.742566245140576
					},
					"lambada_standard": {
						"acc,none": 0.5129051038230157,
						"acc_stderr,none": 0.00696365701905677,
						"alias": " - lambada_standard",
						"perplexity,none": 10.208451981632992,
						"perplexity_stderr,none": 0.2992123645292005
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03978265088298079,
						"acc_stderr,none": 0.0027229753280860612,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 673.497567488225,
						"perplexity_stderr,none": 23.68726478856591
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.26208651399491095,
						"exact_match_stderr,get-answer": 0.011095246835491734
					},
					"logiqa": {
						"acc,none": 0.22119815668202766,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.01742069478339314,
						"acc_stderr,none": 0.016279743532401667,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22328244274809161,
						"acc_norm,none": 0.2881679389312977,
						"acc_norm_stderr,none": 0.011426770634965257,
						"acc_stderr,none": 0.010506807029651448,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23450586264656617,
						"acc_norm,none": 0.22914572864321608,
						"acc_norm_stderr,none": 0.007693830518376536,
						"acc_stderr,none": 0.007756188894243554,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3731201016733743,
						"acc_stderr,none": 0.00497745676845701,
						"alias": "mc_taco",
						"f1,none": 0.49775137887144677,
						"f1_stderr,none": 0.005654028798830159
					},
					"medmcqa": {
						"acc,none": 0.2878316997370308,
						"acc_norm,none": 0.2878316997370308,
						"acc_norm_stderr,none": 0.007001137646905055,
						"acc_stderr,none": 0.007001137646905055,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.25765907305577374,
						"acc_norm,none": 0.25765907305577374,
						"acc_norm_stderr,none": 0.012262552134401231,
						"acc_stderr,none": 0.012262552134401231,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25067654180316196,
						"acc_stderr,none": 0.03931635941693457,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04072314811876837,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22641509433962265,
						"acc_stderr,none": 0.025757559893106727,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322716,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.0321473730202947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.040925639582376556,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.225531914893617,
						"acc_stderr,none": 0.027321078417387533,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.04096985139843671,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.32413793103448274,
						"acc_stderr,none": 0.03900432069185553,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.26455026455026454,
						"acc_stderr,none": 0.022717467897708624,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.1984126984126984,
						"acc_stderr,none": 0.03567016675276862,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24838709677419354,
						"acc_stderr,none": 0.024580028921481003,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.32019704433497537,
						"acc_stderr,none": 0.03282649385304151,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2545454545454545,
						"acc_stderr,none": 0.03401506715249039,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23737373737373738,
						"acc_stderr,none": 0.0303137105381989,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.25906735751295334,
						"acc_stderr,none": 0.03161877917935409,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2076923076923077,
						"acc_stderr,none": 0.0205675395672468,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.02696242432507383,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.20168067226890757,
						"acc_stderr,none": 0.026064313406304534,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.03479185572599661,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.22935779816513763,
						"acc_stderr,none": 0.018025349724618684,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791037,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2696078431372549,
						"acc_stderr,none": 0.031145570659486782,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2320675105485232,
						"acc_stderr,none": 0.02747974455080852,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.33183856502242154,
						"acc_stderr,none": 0.031602951437766785,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.1984732824427481,
						"acc_stderr,none": 0.034981493854624714,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25844845908607866,
						"acc_stderr,none": 0.033664534052158475,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.33884297520661155,
						"acc_stderr,none": 0.0432076780753667,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.044531975073749834,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615623,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04109974682633932,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.21359223300970873,
						"acc_stderr,none": 0.04058042015646034,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2564102564102564,
						"acc_stderr,none": 0.028605953702004264,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.26181353767560667,
						"acc_stderr,none": 0.01572083867844526,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.024547617794803828,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24804469273743016,
						"acc_stderr,none": 0.01444415780826145,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.02463004897982476,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2471837785645317,
						"acc_stderr,none": 0.04120015887058747,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.22508038585209003,
						"acc_stderr,none": 0.02372008851617903,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2993827160493827,
						"acc_stderr,none": 0.025483115601195473,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2765957446808511,
						"acc_stderr,none": 0.026684564340460997,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2503259452411995,
						"acc_stderr,none": 0.011064151027165434,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.15441176470588236,
						"acc_stderr,none": 0.021950024722922037,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.28104575163398693,
						"acc_stderr,none": 0.018185218954318082,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.18181818181818182,
						"acc_stderr,none": 0.036942843353378,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.17959183673469387,
						"acc_stderr,none": 0.024573293589585637,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23041923951901203,
						"acc_stderr,none": 0.03702372565904026,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.21890547263681592,
						"acc_stderr,none": 0.029239174636647,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2622898826514431,
						"acc_stderr,none": 0.04370383754003168,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.22289156626506024,
						"acc_stderr,none": 0.032400048255946876,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.27485380116959063,
						"acc_stderr,none": 0.034240429246915824,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3564951604686704,
						"acc_stderr,none": 0.004834813222302005,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35343775427176566,
						"acc_stderr,none": 0.004821284862489386,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6446078431372549,
						"acc_stderr,none": 0.02372490639698967,
						"alias": "mrpc",
						"f1,none": 0.7716535433070866,
						"f1_stderr,none": 0.018518617442580102
					},
					"multimedqa": {
						"acc,none": 0.2916962384669979,
						"acc_norm,none": 0.2747816944388267,
						"acc_norm_stderr,none": 0.00010506467337814115,
						"acc_stderr,none": 0.07899167094603525,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5511551155115512,
						"acc_stderr,none": 0.0071441168843135,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.666666668337301,
						"mrr_stderr,none": 0.01035966541310035,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.44130925507900676,
						"r@2_stderr,none": 0.016691125435903998
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6302671199727543,
						"mrr_stderr,none": 0.010393788289465652,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.47742663656884876,
						"r@2_stderr,none": 0.016790178837117333
					},
					"openbookqa": {
						"acc,none": 0.226,
						"acc_norm,none": 0.326,
						"acc_norm_stderr,none": 0.020984009562393557,
						"acc_stderr,none": 0.018722956449139915,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.011166819105029997,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4085,
						"acc_stderr,none": 0.0109942854318084,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.011176670299310673,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.011178102477052802,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5575,
						"acc_stderr,none": 0.011108941411747612,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.01117991481396971,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.01112819811994288,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4997857142857143,
						"acc_stderr,none": 0.03871039585701613,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.720348204570185,
						"acc_norm,none": 0.719260065288357,
						"acc_norm_stderr,none": 0.010484325438311829,
						"acc_stderr,none": 0.010471899530306562,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.22865072587532023,
						"acc_norm,none": 0.28805508112724165,
						"acc_norm_stderr,none": 0.003308522701154255,
						"acc_stderr,none": 0.003068209293652854,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7178812736221523,
						"acc_norm,none": 0.44996122278655265,
						"acc_norm_stderr,none": 0.00400119153608266,
						"acc_stderr,none": 0.1468283150440037,
						"alias": "pythia",
						"bits_per_byte,none": 0.7560726480145469,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6888868178965153,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 7.028865458851284,
						"perplexity_stderr,none": 0.1842057018833793,
						"word_perplexity,none": 16.484762135338798,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.32978723404255317,
						"acc_norm,none": 0.40602836879432624,
						"acc_norm_stderr,none": 0.044816032059822264,
						"acc_stderr,none": 0.0414474796001638,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.36666666666666664,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.04583492485141056,
						"acc_stderr,none": 0.044175188121443124,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.038518021388670956,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.352112676056338,
						"acc_norm,none": 0.38028169014084506,
						"acc_norm_stderr,none": 0.0288573637517583,
						"acc_stderr,none": 0.028392089391036893,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5194947830862163,
						"acc_stderr,none": 0.006760266253843522,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5210487261934207,
						"acc_stderr,none": 0.0024844961947545416,
						"alias": "qqp",
						"f1,none": 0.2481167973906966,
						"f1_stderr,none": 0.0035645042085236923
					},
					"race": {
						"acc,none": 0.3397129186602871,
						"acc_stderr,none": 0.014657914432586409,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.516245487364621,
						"acc_stderr,none": 0.030080573208738064,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.846,
						"acc_norm,none": 0.762,
						"acc_norm_stderr,none": 0.01347358666196722,
						"acc_stderr,none": 0.011419913065098703,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.51985559566787,
						"acc_stderr,none": 0.030072723167317177,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8279816513761468,
						"acc_stderr,none": 0.012787588897266155,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5058482455263421,
						"acc_norm,none": 0.6877436768969309,
						"acc_norm_stderr,none": 0.0032764205859902445,
						"acc_stderr,none": 0.0035348502245053236,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.4713653455791821,
						"acc_stderr,none": 0.038676102976220705,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5001001602564102,
						"acc_stderr,none": 0.005004255326032081,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.36921049964528224,
						"acc_stderr,none": 0.004858572568866989,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5420588235294118,
						"acc_stderr,none": 0.004933433300465599,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3372436224631588,
						"acc_stderr,none": 0.048121845997234285,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.00027510551029423286,
						"bleu_diff,none": -5.672266529054544,
						"bleu_diff_stderr,none": 0.43096721928515636,
						"bleu_max,none": 19.007171908637446,
						"bleu_max_stderr,none": 0.4378366396008561,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.00024251335977072583,
						"rouge1_diff,none": -9.611533086730972,
						"rouge1_diff_stderr,none": 0.7251258672009934,
						"rouge1_max,none": 40.12410355796137,
						"rouge1_max_stderr,none": 0.8195632275289366,
						"rouge2_acc,none": 0.1909424724602203,
						"rouge2_acc_stderr,none": 0.00018931794690073162,
						"rouge2_diff,none": -10.894202572385344,
						"rouge2_diff_stderr,none": 0.8343287759058793,
						"rouge2_max,none": 23.76900565755557,
						"rouge2_max_stderr,none": 0.8550033961054041,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.0002340311754862133,
						"rougeL_diff,none": -9.511270603625562,
						"rougeL_diff_stderr,none": 0.6961023478754015,
						"rougeL_max,none": 37.6917411453721,
						"rougeL_max_stderr,none": 0.796072313119905
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3402692778457772,
						"bleu_acc_stderr,none": 0.016586304901762564,
						"bleu_diff,none": -5.672266529054544,
						"bleu_diff_stderr,none": 0.6564809359647517,
						"bleu_max,none": 19.007171908637446,
						"bleu_max_stderr,none": 0.6616922544513092,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.015572840452875828,
						"rouge1_diff,none": -9.611533086730972,
						"rouge1_diff_stderr,none": 0.8515432268540414,
						"rouge1_max,none": 40.12410355796137,
						"rouge1_max_stderr,none": 0.9052973144381555,
						"rouge2_acc,none": 0.1909424724602203,
						"rouge2_acc_stderr,none": 0.013759285842685718,
						"rouge2_diff,none": -10.894202572385344,
						"rouge2_diff_stderr,none": 0.9134159928016803,
						"rouge2_max,none": 23.76900565755557,
						"rouge2_max_stderr,none": 0.9246639368470061,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485083,
						"rougeL_diff,none": -9.511270603625562,
						"rougeL_diff_stderr,none": 0.8343274823924964,
						"rougeL_max,none": 37.6917411453721,
						"rougeL_max_stderr,none": 0.8922288457116285
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23990208078335373,
						"acc_stderr,none": 0.014948812679062135,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3859143933030613,
						"acc_stderr,none": 0.014220384230134374,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"wic": {
						"acc,none": 0.5031347962382445,
						"acc_stderr,none": 0.01981033193209754,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7560726480145469,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6888868178965153,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 16.484762135338798,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5935280189423836,
						"acc_stderr,none": 0.013804448697753373,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4225352112676056,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.38461538461538464,
						"acc_stderr,none": 0.0479366886807504,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.73992673992674,
						"acc_stderr,none": 0.026598537627601476,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5218181818181817,
						"acc_stderr,none": 0.029112404859548864,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.474,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231333,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407056,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.36238286479250337,
						"acc_stderr,none": 0.04736041571640584,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463635,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337342,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.009783558109997094,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667058,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5325301204819277,
						"acc_stderr,none": 0.010000839483876011,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.40803212851405624,
						"acc_stderr,none": 0.009851078965044884,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.42048192771084336,
						"acc_stderr,none": 0.009894519551105778,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177119,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568397,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.009405338156614929,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667053,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3293172690763052,
						"acc_stderr,none": 0.009420053435910411,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293534,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.00946022348499647,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5127850309848986,
						"acc_stderr,none": 0.05850374929654006,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4751819986763733,
						"acc_stderr,none": 0.012851264962354845,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.700860357379219,
						"acc_stderr,none": 0.011783227411626311,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5459960291197882,
						"acc_stderr,none": 0.012812565368728933,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4956982131039047,
						"acc_stderr,none": 0.012866649085718848,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.0128653640203754,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4811383189940437,
						"acc_stderr,none": 0.012857966762465001,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4745201853077432,
						"acc_stderr,none": 0.012850407240776846,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.47915287888815355,
						"acc_stderr,none": 0.012855936282881269,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335884,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5254798146922568,
						"acc_stderr,none": 0.01285040724077685,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.4824619457313038,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6709372892784895,
						"acc_stderr,none": 0.0789358842173095,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.7978494623655914,
						"acc_stderr,none": 0.00833066876481568,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.055211755360913765,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5109489051094891,
						"acc_stderr,none": 0.01615039318009044,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.5703422053231939,
						"acc_stderr,none": 0.030582885384412957,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5301587301587302,
						"acc_stderr,none": 0.028165256808123703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5575396825396826,
						"acc_stderr,none": 0.022145784143589496,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "facebook/opt-1.3b"
	},
	"facebook/opt-2.7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.49633596392333706,
						"acc_norm,none": 0.46533258173618947,
						"acc_norm_stderr,none": 0.03730439508324662,
						"acc_stderr,none": 0.05414267431592354,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.340625,
						"acc_stderr,none": 0.014607722097368186,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00505,
						"acc_stderr,none": 0.005260726854817631,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8499402985074627,
						"acc_stderr,none": 0.12536401749576476,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10795688357550898,
						"acc_stderr,none": 0.10795688357550898,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2526333966499741,
						"acc_norm,none": 0.2526333966499741,
						"acc_norm_stderr,none": 0.034914174346698995,
						"acc_stderr,none": 0.034914174346698995,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4833780560524747,
						"likelihood_diff_stderr,none": 0.4579821012978492,
						"pct_stereotype,none": 0.552772808586762,
						"pct_stereotype_stderr,none": 0.10696086057157732
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05265748031496063,
						"exact_match_stderr,none": 0.0049559691059712025
					},
					"glue": {
						"acc,none": 0.4532427748263178,
						"acc_stderr,none": 0.054731740386826246,
						"alias": "glue",
						"f1,none": 0.3275705642620722,
						"f1_stderr,none": 0.0021394863031687084,
						"mcc,none": -0.02104394798882378,
						"mcc_stderr,none": 0.0010043327966447533
					},
					"kmmlu": {
						"acc,none": 0.09910482240831649,
						"acc_norm,none": 0.09910482240831649,
						"acc_norm_stderr,none": 0.06431859334304811,
						"acc_stderr,none": 0.06431859334304811,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.46941460206095154,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.0004894268537074177,
						"acc_stderr,none": 0.047936916586116735,
						"alias": "kobest",
						"f1,none": 0.37303397555661894,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5957694546865904,
						"acc_stderr,none": 0.0203861348291268,
						"alias": "lambada",
						"perplexity,none": 6.252857881953717,
						"perplexity_stderr,none": 0.5964212167630275
					},
					"lambada_cloze": {
						"acc,none": 0.02804191732971085,
						"acc_stderr,none": 0.0023848036567005433,
						"alias": "lambada_cloze",
						"perplexity,none": 555.425370757919,
						"perplexity_stderr,none": 132.5159628239512
					},
					"lambada_multilingual": {
						"acc,none": 0.3540461866873666,
						"acc_stderr,none": 0.08237530591450967,
						"alias": "lambada_multilingual",
						"perplexity,none": 148.79641057063674,
						"perplexity_stderr,none": 48.1273487880044
					},
					"mmlu": {
						"acc,none": 0.2542372881355932,
						"acc_stderr,none": 0.03913989593849276,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2665249734325186,
						"acc_stderr,none": 0.03549849902257255,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.23817186997103315,
						"acc_stderr,none": 0.03808486039598211,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24374390640233995,
						"acc_stderr,none": 0.031358175851439574,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2619727243894704,
						"acc_stderr,none": 0.04755646329831924,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.28062455642299505,
						"acc_norm,none": 0.2630575560939223,
						"acc_norm_stderr,none": 8.675630088457482e-05,
						"acc_stderr,none": 0.0858764424796511,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4878571428571429,
						"acc_stderr,none": 0.04462392330142569,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7266733226559607,
						"acc_norm,none": 0.47323604297403954,
						"acc_norm_stderr,none": 0.004305116307305878,
						"acc_stderr,none": 0.13235392031892734,
						"alias": "pythia",
						"bits_per_byte,none": 0.7182704688988365,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6452085435829098,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.1060301815197064,
						"perplexity_stderr,none": 0.1194988120521847,
						"word_perplexity,none": 14.329495159583042,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3829787234042553,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.04510354423162246,
						"acc_stderr,none": 0.0399638467460431,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5512628531496456,
						"acc_stderr,none": 0.01875095912192575,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3260443409855264,
						"acc_stderr,none": 0.04903574076345207,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2864137086903305,
						"bleu_acc_stderr,none": 0.0002504667845154174,
						"bleu_diff,none": -7.904996234086893,
						"bleu_diff_stderr,none": 0.47284411623135175,
						"bleu_max,none": 22.071343082036353,
						"bleu_max_stderr,none": 0.4750781295005302,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.00023330045917772103,
						"rouge1_diff,none": -10.225138105880674,
						"rouge1_diff_stderr,none": 0.607783866656897,
						"rouge1_max,none": 46.60660448126869,
						"rouge1_max_stderr,none": 0.7216515369901879,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.00018557991070955286,
						"rouge2_diff,none": -12.062824161465633,
						"rouge2_diff_stderr,none": 0.7639654571974362,
						"rouge2_max,none": 29.71393594981115,
						"rouge2_max_stderr,none": 0.8812450059010208,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.00022424545205841482,
						"rougeL_diff,none": -10.651902960629329,
						"rougeL_diff_stderr,none": 0.5902474874254069,
						"rougeL_max,none": 43.76329925594307,
						"rougeL_max_stderr,none": 0.7203746797108014
					},
					"xcopa": {
						"acc,none": 0.5218181818181817,
						"acc_stderr,none": 0.02982082596871637,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.37207496653279787,
						"acc_stderr,none": 0.055938434363918266,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5190421755610373,
						"acc_stderr,none": 0.058989395736043775,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7042031917284783,
						"acc_stderr,none": 0.08174513984822648,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.49633596392333706,
						"acc_norm,none": 0.46533258173618947,
						"acc_norm_stderr,none": 0.03730439508324662,
						"acc_stderr,none": 0.05414267431592354,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.340625,
						"acc_stderr,none": 0.014607722097368186,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.339,
						"acc_stderr,none": 0.01497675877162035,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224482,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295756,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.26791808873720135,
						"acc_norm,none": 0.3122866894197952,
						"acc_norm_stderr,none": 0.013542598541688065,
						"acc_stderr,none": 0.012942030195136426,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.609006734006734,
						"acc_norm,none": 0.5408249158249159,
						"acc_norm_stderr,none": 0.010225526906982613,
						"acc_stderr,none": 0.010012992232540634,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00505,
						"acc_stderr,none": 0.005260726854817631,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339562,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0105,
						"acc_stderr,none": 0.0022797968630709894,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.023,
						"acc_stderr,none": 0.003352778036238045,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.013,
						"acc_stderr,none": 0.0025335171905233197,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521438,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000116,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.000706929893933947,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.004772234273318872,
						"acc_stderr,none": 0.0014357568013433984,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8499402985074627,
						"acc_stderr,none": 0.12536401749576476,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942317,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448795,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024377,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938053,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.01491084616422986,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496497,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.01113997751789015,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298363,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689103,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970735,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.00721297629463924,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427421,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037191,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.00877616208949111,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998383,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.01094526376104297,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555954,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559948,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528036,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651528,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.01077776229836968,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611429,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.015801065586651758,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037191,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597488,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.014174516461485246,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230204,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491144,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472988,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.0102068692643818,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.01512017260548369,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523741,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264357,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.01524937846417175,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.013663187134877642,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.0069604200625714005,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490526,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996704,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727191,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426122,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469365,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890127,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01391220865102135,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.015549205052920673,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695459,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265155,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998413,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843962,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122581,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946097,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639238,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099186,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264618,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528036,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946092,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397332,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427413,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151113,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.00453647215130651,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727045,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.015792669451628903,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.447,
						"acc_stderr,none": 0.015730176046009063,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5990825688073395,
						"acc_stderr,none": 0.008571628711617004,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4642857142857143,
						"acc_stderr,none": 0.06724777654937658,
						"alias": "cb",
						"f1,none": 0.28172132281721324,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2310549777117385,
						"acc_norm,none": 0.2310549777117385,
						"acc_norm_stderr,none": 0.10795688357550898,
						"acc_stderr,none": 0.10795688357550898,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2526333966499741,
						"acc_norm,none": 0.2526333966499741,
						"acc_norm_stderr,none": 0.034914174346698995,
						"acc_stderr,none": 0.034914174346698995,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.029975990636702532,
						"acc_stderr,none": 0.029975990636702532,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761648,
						"acc_stderr,none": 0.03591728013761648,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932032,
						"acc_stderr,none": 0.030532892233932032,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998164,
						"acc_stderr,none": 0.028942004040998164,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.03297692925434459,
						"acc_stderr,none": 0.03297692925434459,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552486,
						"acc_stderr,none": 0.03703667194552486,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.26229508196721313,
						"acc_norm,none": 0.26229508196721313,
						"acc_norm_stderr,none": 0.03998929318926593,
						"acc_stderr,none": 0.03998929318926593,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467189,
						"acc_stderr,none": 0.02985642316467189,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02104394798882378,
						"mcc_stderr,none": 0.03169121008489189
					},
					"copa": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.4833780560524747,
						"likelihood_diff_stderr,none": 0.4579821012978492,
						"pct_stereotype,none": 0.552772808586762,
						"pct_stereotype_stderr,none": 0.10696086057157732
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4300089445438284,
						"likelihood_diff_stderr,none": 0.08227615604473758,
						"pct_stereotype,none": 0.6535480023852117,
						"pct_stereotype_stderr,none": 0.011623134771282741
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.5315934065934065,
						"likelihood_diff_stderr,none": 0.35635149951055944,
						"pct_stereotype,none": 0.6593406593406593,
						"pct_stereotype_stderr,none": 0.04995670951276871
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.795454545454546,
						"likelihood_diff_stderr,none": 1.8832600426998265,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.994230769230769,
						"likelihood_diff_stderr,none": 0.603104640257884,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.538671875,
						"likelihood_diff_stderr,none": 0.15341589781141354,
						"pct_stereotype,none": 0.6625,
						"pct_stereotype_stderr,none": 0.026474909752348248
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.212962962962963,
						"likelihood_diff_stderr,none": 0.20839391644725255,
						"pct_stereotype,none": 0.5925925925925926,
						"pct_stereotype_stderr,none": 0.03350991604696043
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.626736111111111,
						"likelihood_diff_stderr,none": 0.3319411679265157,
						"pct_stereotype,none": 0.8194444444444444,
						"pct_stereotype_stderr,none": 0.04564949854152485
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.2317913385826773,
						"likelihood_diff_stderr,none": 0.14200603077534613,
						"pct_stereotype,none": 0.5708661417322834,
						"pct_stereotype_stderr,none": 0.021981612809080207
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6328828828828827,
						"likelihood_diff_stderr,none": 0.3121436513675466,
						"pct_stereotype,none": 0.8018018018018018,
						"pct_stereotype_stderr,none": 0.03800905064816034
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.438172043010753,
						"likelihood_diff_stderr,none": 0.4749780991047751,
						"pct_stereotype,none": 0.8387096774193549,
						"pct_stereotype_stderr,none": 0.03834564688497144
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.009868421052632,
						"likelihood_diff_stderr,none": 0.22493947771738665,
						"pct_stereotype,none": 0.6526315789473685,
						"pct_stereotype_stderr,none": 0.03463365347393427
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5388342277877163,
						"likelihood_diff_stderr,none": 0.08852425853063435,
						"pct_stereotype,none": 0.45199761478831246,
						"pct_stereotype_stderr,none": 0.012156884449033538
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.0166666666666666,
						"likelihood_diff_stderr,none": 0.2941056397637148,
						"pct_stereotype,none": 0.43333333333333335,
						"pct_stereotype_stderr,none": 0.052526671187288064
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.0576923076923075,
						"likelihood_diff_stderr,none": 0.4441155916843275,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.1893939393939394,
						"likelihood_diff_stderr,none": 0.49325463425012545,
						"pct_stereotype,none": 0.6515151515151515,
						"pct_stereotype_stderr,none": 0.059101367791192905
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0529595015576323,
						"likelihood_diff_stderr,none": 0.18007780905430798,
						"pct_stereotype,none": 0.49221183800623053,
						"pct_stereotype_stderr,none": 0.027947458769356347
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.0642292490118574,
						"likelihood_diff_stderr,none": 0.21591648004546196,
						"pct_stereotype,none": 0.31620553359683795,
						"pct_stereotype_stderr,none": 0.02929188048554201
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.211805555555555,
						"likelihood_diff_stderr,none": 0.5546842186411842,
						"pct_stereotype,none": 0.5138888888888888,
						"pct_stereotype_stderr,none": 0.059316185327165566
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.1472826086956522,
						"likelihood_diff_stderr,none": 0.16327116770326242,
						"pct_stereotype,none": 0.26304347826086955,
						"pct_stereotype_stderr,none": 0.020550782353701808
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.008695652173913,
						"likelihood_diff_stderr,none": 0.4109709535529399,
						"pct_stereotype,none": 0.6869565217391305,
						"pct_stereotype_stderr,none": 0.043432470166108225
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3983516483516483,
						"likelihood_diff_stderr,none": 0.3235745961304115,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355003
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8705357142857144,
						"likelihood_diff_stderr,none": 0.2869469959073107,
						"pct_stereotype,none": 0.6479591836734694,
						"pct_stereotype_stderr,none": 0.034202120189692285
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05265748031496063,
						"exact_match_stderr,none": 0.0049559691059712025
					},
					"glue": {
						"acc,none": 0.4532427748263178,
						"acc_stderr,none": 0.054731740386826246,
						"alias": "glue",
						"f1,none": 0.3275705642620722,
						"f1_stderr,none": 0.0021394863031687084,
						"mcc,none": -0.02104394798882378,
						"mcc_stderr,none": 0.0010043327966447533
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.003032600454890068,
						"exact_match_stderr,get-answer": 0.0015145735612245483
					},
					"hellaswag": {
						"acc,none": 0.45937064329814775,
						"acc_norm,none": 0.6057558255327624,
						"acc_norm_stderr,none": 0.0048768899831108355,
						"acc_stderr,none": 0.004973280417705515,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09910482240831649,
						"acc_norm,none": 0.09910482240831649,
						"acc_norm_stderr,none": 0.06431859334304811,
						"acc_stderr,none": 0.06431859334304811,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.00900889339265152,
						"acc_stderr,none": 0.00900889339265152,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333378,
						"acc_stderr,none": 0.008333333333333378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.192,
						"acc_norm,none": 0.192,
						"acc_norm_stderr,none": 0.01246159264665998,
						"acc_stderr,none": 0.01246159264665998,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319329,
						"acc_stderr,none": 0.012535235623319329,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17333333333333334,
						"acc_norm,none": 0.17333333333333334,
						"acc_norm_stderr,none": 0.015466528504746207,
						"acc_stderr,none": 0.015466528504746207,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.01,
						"acc_norm,none": 0.01,
						"acc_norm_stderr,none": 0.003148000938676761,
						"acc_stderr,none": 0.003148000938676761,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.005,
						"acc_norm,none": 0.005,
						"acc_norm_stderr,none": 0.00223158687484488,
						"acc_stderr,none": 0.00223158687484488,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306499,
						"acc_stderr,none": 0.004536472151306499,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.034,
						"acc_norm,none": 0.034,
						"acc_norm_stderr,none": 0.005733836139695471,
						"acc_stderr,none": 0.005733836139695471,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178347,
						"acc_stderr,none": 0.004429403980178347,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178341,
						"acc_stderr,none": 0.004429403980178341,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.181,
						"acc_norm,none": 0.181,
						"acc_norm_stderr,none": 0.012181436179177899,
						"acc_stderr,none": 0.012181436179177899,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.03,
						"acc_norm,none": 0.03,
						"acc_norm_stderr,none": 0.005397140829099211,
						"acc_stderr,none": 0.005397140829099211,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.133,
						"acc_norm,none": 0.133,
						"acc_norm_stderr,none": 0.01074366913239733,
						"acc_stderr,none": 0.01074366913239733,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.01001655286669685,
						"acc_stderr,none": 0.01001655286669685,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.083,
						"acc_norm,none": 0.083,
						"acc_norm_stderr,none": 0.008728527206074796,
						"acc_stderr,none": 0.008728527206074796,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.07,
						"acc_norm,none": 0.07,
						"acc_norm_stderr,none": 0.008072494358323485,
						"acc_stderr,none": 0.008072494358323485,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178315,
						"acc_stderr,none": 0.004429403980178315,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.032,
						"acc_norm,none": 0.032,
						"acc_norm_stderr,none": 0.005568393575081365,
						"acc_stderr,none": 0.005568393575081365,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281574,
						"acc_stderr,none": 0.013354937452281574,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.012285191326386707,
						"acc_stderr,none": 0.012285191326386707,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.13666666666666666,
						"acc_norm,none": 0.13666666666666666,
						"acc_norm_stderr,none": 0.014034829611310277,
						"acc_stderr,none": 0.014034829611310277,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.104,
						"acc_norm,none": 0.104,
						"acc_norm_stderr,none": 0.009658016218524289,
						"acc_stderr,none": 0.009658016218524289,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847165,
						"acc_stderr,none": 0.009779910359847165,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557422,
						"acc_stderr,none": 0.007572076091557422,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.083,
						"acc_norm,none": 0.083,
						"acc_norm_stderr,none": 0.008728527206074792,
						"acc_stderr,none": 0.008728527206074792,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247109,
						"acc_stderr,none": 0.013414729030247109,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.041,
						"acc_norm,none": 0.041,
						"acc_norm_stderr,none": 0.006273624021118784,
						"acc_stderr,none": 0.006273624021118784,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.010978183844357803,
						"acc_stderr,none": 0.010978183844357803,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475279,
						"acc_stderr,none": 0.011358918303475279,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.026,
						"acc_norm,none": 0.026,
						"acc_norm_stderr,none": 0.005034813735318231,
						"acc_stderr,none": 0.005034813735318231,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.46941460206095154,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.0004894268537074177,
						"acc_stderr,none": 0.047936916586116735,
						"alias": "kobest",
						"f1,none": 0.37303397555661894,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": " - kobest_copa",
						"f1,none": 0.47908109905873963,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.298,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.020475118092988957,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.2958057939732427,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4836272040302267,
						"acc_stderr,none": 0.025112470822047955,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4831549868224685,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5957694546865904,
						"acc_stderr,none": 0.0203861348291268,
						"alias": "lambada",
						"perplexity,none": 6.252857881953717,
						"perplexity_stderr,none": 0.5964212167630275
					},
					"lambada_cloze": {
						"acc,none": 0.02804191732971085,
						"acc_stderr,none": 0.0023848036567005433,
						"alias": "lambada_cloze",
						"perplexity,none": 555.425370757919,
						"perplexity_stderr,none": 132.5159628239512
					},
					"lambada_multilingual": {
						"acc,none": 0.3540461866873666,
						"acc_stderr,none": 0.08237530591450967,
						"alias": "lambada_multilingual",
						"perplexity,none": 148.79641057063674,
						"perplexity_stderr,none": 48.1273487880044
					},
					"lambada_openai": {
						"acc,none": 0.6339996118765767,
						"acc_stderr,none": 0.006711156119694331,
						"alias": " - lambada_openai",
						"perplexity,none": 5.1060301815197064,
						"perplexity_stderr,none": 0.1194988120521847
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.029303318455268776,
						"acc_stderr,none": 0.0023496990846877722,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 294.384128267924,
						"perplexity_stderr,none": 10.030320094838048
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.23947215214438192,
						"acc_stderr,none": 0.005945619905289314,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 214.59317088441452,
						"perplexity_stderr,none": 13.311151910754974
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6336114884533282,
						"acc_stderr,none": 0.0067126579546010565,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.106550068786071,
						"perplexity_stderr,none": 0.11955276070996622
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2720745196972637,
						"acc_stderr,none": 0.006200111064998436,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 206.2866828318435,
						"perplexity_stderr,none": 12.455255220157412
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.33630894624490587,
						"acc_stderr,none": 0.00658209679643863,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 115.27412720003156,
						"perplexity_stderr,none": 6.873650092300391
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.28876382689695324,
						"acc_stderr,none": 0.006313793671214648,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 202.72152186810794,
						"perplexity_stderr,none": 13.181913854678776
					},
					"lambada_standard": {
						"acc,none": 0.5573452357849796,
						"acc_stderr,none": 0.006920011095249954,
						"alias": " - lambada_standard",
						"perplexity,none": 7.400413030267212,
						"perplexity_stderr,none": 0.19658424515845352
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.02678051620415292,
						"acc_stderr,none": 0.0022491941343246117,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 816.4666132479142,
						"perplexity_stderr,none": 30.75522088508138
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.22073791348600508,
						"exact_match_stderr,get-answer": 0.010463865471633079
					},
					"logiqa": {
						"acc,none": 0.21044546850998463,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.017162894755127073,
						"acc_stderr,none": 0.015988369488888755,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.227735368956743,
						"acc_norm,none": 0.26463104325699743,
						"acc_norm_stderr,none": 0.011129738184571287,
						"acc_stderr,none": 0.01058059820424,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.24020100502512562,
						"acc_norm,none": 0.2371859296482412,
						"acc_norm_stderr,none": 0.007786717148416349,
						"acc_stderr,none": 0.007820551099979384,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3550095318788392,
						"acc_stderr,none": 0.004924789320583402,
						"alias": "mc_taco",
						"f1,none": 0.49851778656126483,
						"f1_stderr,none": 0.005570514390258821
					},
					"medmcqa": {
						"acc,none": 0.2581879034185991,
						"acc_norm,none": 0.2581879034185991,
						"acc_norm_stderr,none": 0.0067674162007500226,
						"acc_stderr,none": 0.0067674162007500226,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2670856245090338,
						"acc_norm,none": 0.2670856245090338,
						"acc_norm_stderr,none": 0.012405329984332171,
						"acc_stderr,none": 0.012405329984332171,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2542372881355932,
						"acc_stderr,none": 0.03913989593849276,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.034597776068105386,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22641509433962265,
						"acc_stderr,none": 0.025757559893106748,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.03396116205845334,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2254335260115607,
						"acc_stderr,none": 0.03186209851641144,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2170212765957447,
						"acc_stderr,none": 0.02694748312149622,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.20175438596491227,
						"acc_stderr,none": 0.037752050135836386,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.02201908001221789,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.20634920634920634,
						"acc_stderr,none": 0.036196045241242494,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24516129032258063,
						"acc_stderr,none": 0.024472243840895528,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3251231527093596,
						"acc_stderr,none": 0.032957975663112704,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.296969696969697,
						"acc_stderr,none": 0.03567969772268049,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2474747474747475,
						"acc_stderr,none": 0.030746300742124505,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.2849740932642487,
						"acc_stderr,none": 0.032577140777096614,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2205128205128205,
						"acc_stderr,none": 0.02102067268082791,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.028037929969114996,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23109243697478993,
						"acc_stderr,none": 0.027381406927868963,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.03631329803969653,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23486238532110093,
						"acc_stderr,none": 0.018175110510343588,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.23148148148148148,
						"acc_stderr,none": 0.028765111718046955,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.030964517926923403,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2616033755274262,
						"acc_stderr,none": 0.028609516716994934,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.21524663677130046,
						"acc_stderr,none": 0.02758406660220827,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.22137404580152673,
						"acc_stderr,none": 0.03641297081313729,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2665249734325186,
						"acc_stderr,none": 0.03549849902257255,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.34710743801652894,
						"acc_stderr,none": 0.04345724570292534,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.04133119440243839,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3374233128834356,
						"acc_stderr,none": 0.037149084099355745,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04109974682633932,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.23300970873786409,
						"acc_stderr,none": 0.041858325989283136,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.28205128205128205,
						"acc_stderr,none": 0.02948036054954119,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.24776500638569604,
						"acc_stderr,none": 0.015438083080568963,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2861271676300578,
						"acc_stderr,none": 0.02433214677913413,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.25027932960893856,
						"acc_stderr,none": 0.014487500852850412,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24183006535947713,
						"acc_stderr,none": 0.024518195641879334,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.23817186997103315,
						"acc_stderr,none": 0.03808486039598211,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3086816720257235,
						"acc_stderr,none": 0.026236965881153266,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.02517104191530968,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.025770015644290392,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2516297262059974,
						"acc_stderr,none": 0.011083276280441904,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.16544117647058823,
						"acc_stderr,none": 0.022571771025494767,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.26633986928104575,
						"acc_stderr,none": 0.0178831881346672,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.24545454545454545,
						"acc_stderr,none": 0.04122066502878284,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24897959183673468,
						"acc_stderr,none": 0.027682979522960234,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24374390640233995,
						"acc_stderr,none": 0.031358175851439574,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.2537313432835821,
						"acc_stderr,none": 0.03076944496729601,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2619727243894704,
						"acc_stderr,none": 0.04755646329831924,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165065,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.23493975903614459,
						"acc_stderr,none": 0.03300533186128922,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.03301405946987251,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.35557819663779927,
						"acc_stderr,none": 0.0048320302856709385,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.35394629780309195,
						"acc_stderr,none": 0.004822854375637905,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.0233666545744261,
						"alias": "mrpc",
						"f1,none": 0.7976190476190477,
						"f1_stderr,none": 0.01698870242278138
					},
					"multimedqa": {
						"acc,none": 0.28062455642299505,
						"acc_norm,none": 0.2630575560939223,
						"acc_norm_stderr,none": 8.675630088457482e-05,
						"acc_stderr,none": 0.0858764424796511,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5713696369636964,
						"acc_stderr,none": 0.007108263771672479,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6828442455078086,
						"mrr_stderr,none": 0.010335104746827648,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4401805869074492,
						"r@2_stderr,none": 0.016686597274671547
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6291384518011968,
						"mrr_stderr,none": 0.010263174522680605,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.48758465011286684,
						"r@2_stderr,none": 0.016802133947307323
					},
					"openbookqa": {
						"acc,none": 0.248,
						"acc_norm,none": 0.352,
						"acc_norm_stderr,none": 0.021380042385946044,
						"acc_stderr,none": 0.019332342821239103,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.011069813475627664,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.010947964603728237,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4465,
						"acc_stderr,none": 0.01111893386729012,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5065,
						"acc_stderr,none": 0.011182191006142298,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5575,
						"acc_stderr,none": 0.01110894141174761,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5325,
						"acc_stderr,none": 0.011159486640120933,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.011137752231145225,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4878571428571429,
						"acc_stderr,none": 0.04462392330142569,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7393906420021763,
						"acc_norm,none": 0.7470076169749728,
						"acc_norm_stderr,none": 0.010142888698862462,
						"acc_stderr,none": 0.010241826155811628,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.28816182749786506,
						"acc_norm,none": 0.29910333048676346,
						"acc_norm_stderr,none": 0.003345112852134583,
						"acc_stderr,none": 0.003308887585290702,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.02191237788577996,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7266733226559607,
						"acc_norm,none": 0.47323604297403954,
						"acc_norm_stderr,none": 0.004305116307305878,
						"acc_stderr,none": 0.13235392031892734,
						"alias": "pythia",
						"bits_per_byte,none": 0.7182704688988365,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6452085435829098,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.1060301815197064,
						"perplexity_stderr,none": 0.1194988120521847,
						"word_perplexity,none": 14.329495159583042,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3829787234042553,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.04510354423162246,
						"acc_stderr,none": 0.0399638467460431,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.045809453927047654,
						"acc_stderr,none": 0.04552192400253557,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.03945381823835187,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.352112676056338,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.028908177688046176,
						"acc_stderr,none": 0.028392089391036893,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.509793153944719,
						"acc_stderr,none": 0.006764112742205993,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.48817709621568145,
						"acc_stderr,none": 0.0024860053472219004,
						"alias": "qqp",
						"f1,none": 0.32307239360136086,
						"f1_stderr,none": 0.003465688102774173
					},
					"race": {
						"acc,none": 0.36555023923444974,
						"acc_stderr,none": 0.014904654247182307,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.856,
						"acc_norm,none": 0.79,
						"acc_norm_stderr,none": 0.012886662332274526,
						"acc_stderr,none": 0.011107987548939149,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5487364620938628,
						"acc_stderr,none": 0.029953149241808943,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5172018348623854,
						"acc_stderr,none": 0.01693182442590374,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5239928021593522,
						"acc_norm,none": 0.7127861641507548,
						"acc_norm_stderr,none": 0.0031989910958447657,
						"acc_stderr,none": 0.0035310197177532573,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5512628531496456,
						"acc_stderr,none": 0.01875095912192575,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5893429487179487,
						"acc_stderr,none": 0.0049237177870056536,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.526705178879092,
						"acc_stderr,none": 0.005026655417763789,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5377450980392157,
						"acc_stderr,none": 0.004936853011387222,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3260443409855264,
						"acc_stderr,none": 0.04903574076345207,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2864137086903305,
						"bleu_acc_stderr,none": 0.0002504667845154174,
						"bleu_diff,none": -7.904996234086893,
						"bleu_diff_stderr,none": 0.47284411623135175,
						"bleu_max,none": 22.071343082036353,
						"bleu_max_stderr,none": 0.4750781295005302,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.00023330045917772103,
						"rouge1_diff,none": -10.225138105880674,
						"rouge1_diff_stderr,none": 0.607783866656897,
						"rouge1_max,none": 46.60660448126869,
						"rouge1_max_stderr,none": 0.7216515369901879,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.00018557991070955286,
						"rouge2_diff,none": -12.062824161465633,
						"rouge2_diff_stderr,none": 0.7639654571974362,
						"rouge2_max,none": 29.71393594981115,
						"rouge2_max_stderr,none": 0.8812450059010208,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.00022424545205841482,
						"rougeL_diff,none": -10.651902960629329,
						"rougeL_diff_stderr,none": 0.5902474874254069,
						"rougeL_max,none": 43.76329925594307,
						"rougeL_max_stderr,none": 0.7203746797108014
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2864137086903305,
						"bleu_acc_stderr,none": 0.01582614243950235,
						"bleu_diff,none": -7.904996234086893,
						"bleu_diff_stderr,none": 0.6876366164125873,
						"bleu_max,none": 22.071343082036353,
						"bleu_max_stderr,none": 0.6892591163709989,
						"rouge1_acc,none": 0.2558139534883721,
						"rouge1_acc_stderr,none": 0.01527417621928335,
						"rouge1_diff,none": -10.225138105880674,
						"rouge1_diff_stderr,none": 0.7796049426837268,
						"rouge1_max,none": 46.60660448126869,
						"rouge1_max_stderr,none": 0.8495007574983015,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.013622771770442051,
						"rouge2_diff,none": -12.062824161465633,
						"rouge2_diff_stderr,none": 0.874051175388167,
						"rouge2_max,none": 29.71393594981115,
						"rouge2_max_stderr,none": 0.9387465077969775,
						"rougeL_acc,none": 0.24112607099143207,
						"rougeL_acc_stderr,none": 0.014974827279752339,
						"rougeL_diff,none": -10.651902960629329,
						"rougeL_diff_stderr,none": 0.7682756584881542,
						"rougeL_max,none": 43.76329925594307,
						"rougeL_max_stderr,none": 0.8487488908451082
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22643818849449204,
						"acc_stderr,none": 0.014651337324602587,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.37584741723104365,
						"acc_stderr,none": 0.013822922656550548,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05265748031496063,
						"exact_match_stderr,none": 0.0049559691059712025
					},
					"wic": {
						"acc,none": 0.5062695924764891,
						"acc_stderr,none": 0.019809163801196513,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7182704688988365,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6452085435829098,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.329495159583042,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6093133385951065,
						"acc_stderr,none": 0.013712536036556673,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4084507042253521,
						"acc_stderr,none": 0.05875113694257524,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7912087912087912,
						"acc_stderr,none": 0.024644340711969324,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5218181818181817,
						"acc_stderr,none": 0.02982082596871637,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668086,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269945,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269945,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.37207496653279787,
						"acc_stderr,none": 0.055938434363918266,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.00954898064915338,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3301204819277108,
						"acc_stderr,none": 0.009425884992430716,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.44417670682730925,
						"acc_stderr,none": 0.009959414626897997,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358239,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5618473895582329,
						"acc_stderr,none": 0.00994510647455373,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.43293172690763054,
						"acc_stderr,none": 0.009931501976863056,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4397590361445783,
						"acc_stderr,none": 0.009949067285169349,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3293172690763052,
						"acc_stderr,none": 0.009420053435910408,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.009620250217765984,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956473,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3301204819277108,
						"acc_stderr,none": 0.009425884992430716,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667053,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757719,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3289156626506024,
						"acc_stderr,none": 0.009417125981806726,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5190421755610373,
						"acc_stderr,none": 0.058989395736043775,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4824619457313038,
						"acc_stderr,none": 0.012859207453266304,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7035076108537393,
						"acc_stderr,none": 0.011753107305763628,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5684976836532097,
						"acc_stderr,none": 0.012745810046098403,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.499669093315685,
						"acc_stderr,none": 0.012867122498493415,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5029781601588352,
						"acc_stderr,none": 0.012866897066011233,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.49106551952349436,
						"acc_stderr,none": 0.012865070917320797,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4692256783587028,
						"acc_stderr,none": 0.01284273034058578,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.48643282594308407,
						"acc_stderr,none": 0.012862387586650072,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4943745863666446,
						"acc_stderr,none": 0.012866310923072515,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.528127068166777,
						"acc_stderr,none": 0.012846749995797699,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7042031917284783,
						"acc_stderr,none": 0.08174513984822648,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8326881720430107,
						"acc_stderr,none": 0.0077425934899901586,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.05221260262032129,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5307612095933264,
						"acc_stderr,none": 0.016123665745137194,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6311787072243346,
						"acc_stderr,none": 0.029808046634490215,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5714285714285714,
						"acc_stderr,none": 0.02792722339076032,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5694444444444444,
						"acc_stderr,none": 0.02207782498650611,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "facebook/opt-2.7b"
	},
	"facebook/opt-6.7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5386133032694476,
						"acc_norm,none": 0.5155016910935738,
						"acc_norm_stderr,none": 0.041631208646455586,
						"acc_stderr,none": 0.056193646854792584,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.016443697255108088,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0075499999999999986,
						"acc_stderr,none": 0.007421895708244642,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8515074626865672,
						"acc_stderr,none": 0.12963868205673482,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24591381872213966,
						"acc_norm,none": 0.24591381872213966,
						"acc_norm_stderr,none": 0.11082820107373707,
						"acc_stderr,none": 0.11082820107373707,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2522016922811258,
						"acc_norm,none": 0.2522016922811258,
						"acc_norm_stderr,none": 0.0375942282634118,
						"acc_stderr,none": 0.0375942282634118,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.434593023255814,
						"likelihood_diff_stderr,none": 0.45642347923154075,
						"pct_stereotype,none": 0.5766249254621347,
						"pct_stereotype_stderr,none": 0.10036269156300633
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.08759842519685039,
						"exact_match_stderr,none": 0.0062731576336123905
					},
					"glue": {
						"acc,none": 0.4479546430605564,
						"acc_stderr,none": 0.07085107342766765,
						"alias": "glue",
						"f1,none": 0.3718310013716583,
						"f1_stderr,none": 0.0014146435259075004,
						"mcc,none": -0.006567160417631986,
						"mcc_stderr,none": 0.0009653719789053045
					},
					"kmmlu": {
						"acc,none": 0.09841178169217442,
						"acc_norm,none": 0.09841178169217442,
						"acc_norm_stderr,none": 0.06433910675649089,
						"acc_stderr,none": 0.06433910675649089,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.46744135058101294,
						"acc_norm,none": 0.43,
						"acc_norm_stderr,none": 0.0004911823647294574,
						"acc_stderr,none": 0.04926168838119018,
						"alias": "kobest",
						"f1,none": 0.36610912071793955,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6523384436250728,
						"acc_stderr,none": 0.01252307762641048,
						"alias": "lambada",
						"perplexity,none": 4.7331850055618805,
						"perplexity_stderr,none": 0.26420318619980676
					},
					"lambada_cloze": {
						"acc,none": 0.03755094119930138,
						"acc_stderr,none": 0.004657681211612462,
						"alias": "lambada_cloze",
						"perplexity,none": 406.28564830227816,
						"perplexity_stderr,none": 83.71095243163587
					},
					"lambada_multilingual": {
						"acc,none": 0.3881622355909179,
						"acc_stderr,none": 0.08375300012280831,
						"alias": "lambada_multilingual",
						"perplexity,none": 94.24299539565746,
						"perplexity_stderr,none": 30.028815681145435
					},
					"mmlu": {
						"acc,none": 0.2503916820965675,
						"acc_stderr,none": 0.03702168702154798,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2563230605738576,
						"acc_stderr,none": 0.033748847743544266,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25490827164467333,
						"acc_stderr,none": 0.03891388181415792,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2365940851478713,
						"acc_stderr,none": 0.03257627485114301,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2505550269584523,
						"acc_stderr,none": 0.042181165505982395,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.265152590489709,
						"acc_norm,none": 0.24448362505753568,
						"acc_norm_stderr,none": 0.00011512370899131588,
						"acc_stderr,none": 0.0872894702734837,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4877142857142857,
						"acc_stderr,none": 0.049786073170358214,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7321891499583415,
						"acc_norm,none": 0.52267854559101,
						"acc_norm_stderr,none": 0.0044258058237959235,
						"acc_stderr,none": 0.13169555347481052,
						"alias": "pythia",
						"bits_per_byte,none": 0.6767232860854291,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.598505032488132,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.250598313839249,
						"perplexity_stderr,none": 0.09252673510748143,
						"word_perplexity,none": 12.284305307357904,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4024822695035461,
						"acc_norm,none": 0.4627659574468085,
						"acc_norm_stderr,none": 0.0499272283431575,
						"acc_stderr,none": 0.040562881423311964,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5515290672523377,
						"acc_stderr,none": 0.022361709180050694,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3079820170872586,
						"acc_stderr,none": 0.04306092106663105,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29008567931456547,
						"bleu_acc_stderr,none": 0.0002523725220234942,
						"bleu_diff,none": -8.57088064642411,
						"bleu_diff_stderr,none": 0.6576073444054554,
						"bleu_max,none": 23.824101893552733,
						"bleu_max_stderr,none": 0.5554250697572132,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.00024251335977072588,
						"rouge1_diff,none": -10.858328818075696,
						"rouge1_diff_stderr,none": 0.8205581843037311,
						"rouge1_max,none": 48.84229807754479,
						"rouge1_max_stderr,none": 0.7382658420689897,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.00019929461127346601,
						"rouge2_diff,none": -13.191388596437479,
						"rouge2_diff_stderr,none": 1.0841061883387555,
						"rouge2_max,none": 31.615412268381807,
						"rouge2_max_stderr,none": 0.9677786270619678,
						"rougeL_acc,none": 0.26560587515299877,
						"rougeL_acc_stderr,none": 0.00023904337529069867,
						"rougeL_diff,none": -11.114783151246234,
						"rougeL_diff_stderr,none": 0.8420572608514878,
						"rougeL_max,none": 45.927717086588885,
						"rougeL_max_stderr,none": 0.7574028486713349
					},
					"xcopa": {
						"acc,none": 0.5229090909090909,
						"acc_stderr,none": 0.027215770707865124,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3784738955823293,
						"acc_stderr,none": 0.054985199168171614,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5250586607304013,
						"acc_stderr,none": 0.06681237456634356,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7104967408406383,
						"acc_stderr,none": 0.08278874226692942,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5386133032694476,
						"acc_norm,none": 0.5155016910935738,
						"acc_norm_stderr,none": 0.041631208646455586,
						"acc_stderr,none": 0.056193646854792584,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.016443697255108088,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.014632638658632895,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932572,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295758,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.30119453924914674,
						"acc_norm,none": 0.3430034129692833,
						"acc_norm_stderr,none": 0.013872423223718169,
						"acc_stderr,none": 0.013406741767847629,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6557239057239057,
						"acc_norm,none": 0.6005892255892256,
						"acc_norm_stderr,none": 0.010050018228742115,
						"acc_stderr,none": 0.009749495321590817,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0075499999999999986,
						"acc_stderr,none": 0.007421895708244642,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.018,
						"acc_stderr,none": 0.0029736208922129317,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.036,
						"acc_stderr,none": 0.004166614973833173,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.012,
						"acc_stderr,none": 0.0024353573624298335,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521483,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430694893,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005639913232104121,
						"acc_stderr,none": 0.0015601516534579569,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8515074626865672,
						"acc_stderr,none": 0.12963868205673482,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942314,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298224,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786553,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745911,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595285,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.621,
						"acc_stderr,none": 0.01534909100222535,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.01153989467755956,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397335,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469417,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.00791034598317755,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.0072744014816970536,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666674,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696837,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910651,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343965,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307804,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.01177211037081219,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098687,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323506,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644608,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142667,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.01581879370351089,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787738,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.012386784588117719,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259588,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235254,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571401,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.01135891830347528,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110866,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.014566646394664392,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796379,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099187,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.015231776226264895,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307818,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426748,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696244,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340973,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783222,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511958,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698451,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000009,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397334,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617328,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.403,
						"acc_stderr,none": 0.015518757419066527,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.0049395748196984605,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.0088234263669423,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099855,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719104,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.015819299929208316,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504396,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487923,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992448,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345677,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731968,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666676,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487912,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611498,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689088,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.417,
						"acc_stderr,none": 0.015599819048769618,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6547400611620795,
						"acc_stderr,none": 0.008315724479705725,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.05971290310957636,
						"alias": "cb",
						"f1,none": 0.23539344815940563,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24591381872213966,
						"acc_norm,none": 0.24591381872213966,
						"acc_norm_stderr,none": 0.11082820107373707,
						"acc_stderr,none": 0.11082820107373707,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2127659574468085,
						"acc_norm,none": 0.2127659574468085,
						"acc_norm_stderr,none": 0.060342609647735204,
						"acc_stderr,none": 0.060342609647735204,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0606060606060606,
						"acc_stderr,none": 0.0606060606060606,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063839,
						"acc_stderr,none": 0.07213122508063839,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215394,
						"acc_stderr,none": 0.07180198468215394,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.06007385040937024,
						"acc_stderr,none": 0.06007385040937024,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141224,
						"acc_stderr,none": 0.06372446937141224,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.0713860923457608,
						"acc_stderr,none": 0.0713860923457608,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.12244897959183673,
						"acc_norm,none": 0.12244897959183673,
						"acc_norm_stderr,none": 0.047314380079059706,
						"acc_stderr,none": 0.047314380079059706,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.06358669845936323,
						"acc_stderr,none": 0.06358669845936323,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.0808104675899639,
						"acc_stderr,none": 0.0808104675899639,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2522016922811258,
						"acc_norm,none": 0.2522016922811258,
						"acc_norm_stderr,none": 0.0375942282634118,
						"acc_stderr,none": 0.0375942282634118,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.03220965704514523,
						"acc_stderr,none": 0.03220965704514523,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22972972972972974,
						"acc_norm,none": 0.22972972972972974,
						"acc_norm_stderr,none": 0.03469536825407607,
						"acc_stderr,none": 0.03469536825407607,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2865853658536585,
						"acc_norm,none": 0.2865853658536585,
						"acc_norm_stderr,none": 0.03541638332993504,
						"acc_stderr,none": 0.03541638332993504,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.033175059300091805,
						"acc_stderr,none": 0.033175059300091805,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.27751196172248804,
						"acc_norm,none": 0.27751196172248804,
						"acc_norm_stderr,none": 0.031047348519843285,
						"acc_stderr,none": 0.031047348519843285,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070894,
						"acc_stderr,none": 0.03541088558070894,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24148606811145512,
						"acc_norm,none": 0.24148606811145512,
						"acc_norm_stderr,none": 0.023850631658205956,
						"acc_stderr,none": 0.023850631658205956,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.24019607843137256,
						"acc_norm,none": 0.24019607843137256,
						"acc_norm_stderr,none": 0.02998373305591362,
						"acc_stderr,none": 0.02998373305591362,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.0452235007738203,
						"acc_stderr,none": 0.0452235007738203,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.03826076324884864,
						"acc_stderr,none": 0.03826076324884864,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.039906571509931855,
						"acc_stderr,none": 0.039906571509931855,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23443223443223443,
						"acc_norm,none": 0.23443223443223443,
						"acc_norm_stderr,none": 0.02568715645908419,
						"acc_stderr,none": 0.02568715645908419,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.030190282453501964,
						"acc_stderr,none": 0.030190282453501964,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067566,
						"acc_stderr,none": 0.035589261576067566,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.03361670240809546,
						"acc_stderr,none": 0.03361670240809546,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.03351953879521269,
						"acc_stderr,none": 0.03351953879521269,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2896825396825397,
						"acc_norm,none": 0.2896825396825397,
						"acc_norm_stderr,none": 0.02863192475336099,
						"acc_stderr,none": 0.02863192475336099,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.030746300742124505,
						"acc_stderr,none": 0.030746300742124505,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.029344572500634342,
						"acc_stderr,none": 0.029344572500634342,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.029017133559381264,
						"acc_stderr,none": 0.029017133559381264,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.037125378336148665,
						"acc_stderr,none": 0.037125378336148665,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081378,
						"acc_stderr,none": 0.03607993033081378,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.03297692925434459,
						"acc_stderr,none": 0.03297692925434459,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.035829121651111746,
						"acc_stderr,none": 0.035829121651111746,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.033071627503231775,
						"acc_stderr,none": 0.033071627503231775,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552487,
						"acc_stderr,none": 0.03703667194552487,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940588,
						"acc_stderr,none": 0.04265792110940588,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.03718489006818115,
						"acc_stderr,none": 0.03718489006818115,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593336,
						"acc_stderr,none": 0.03253020905593336,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745193,
						"acc_stderr,none": 0.021801329069745193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.22429906542056074,
						"acc_norm,none": 0.22429906542056074,
						"acc_norm_stderr,none": 0.02858058327333863,
						"acc_stderr,none": 0.02858058327333863,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.0404901546062249,
						"acc_stderr,none": 0.0404901546062249,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2714285714285714,
						"acc_norm,none": 0.2714285714285714,
						"acc_norm_stderr,none": 0.03076030982422605,
						"acc_stderr,none": 0.03076030982422605,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2611111111111111,
						"acc_norm,none": 0.2611111111111111,
						"acc_norm_stderr,none": 0.03283036633966841,
						"acc_stderr,none": 0.03283036633966841,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649575,
						"acc_stderr,none": 0.04083221538649575,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.03724563619774632,
						"acc_stderr,none": 0.03724563619774632,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945208,
						"acc_stderr,none": 0.029927771242945208,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23670212765957446,
						"acc_norm,none": 0.23670212765957446,
						"acc_norm_stderr,none": 0.02194989630475158,
						"acc_stderr,none": 0.02194989630475158,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543969,
						"acc_stderr,none": 0.03279424038543969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2610619469026549,
						"acc_norm,none": 0.2610619469026549,
						"acc_norm_stderr,none": 0.029280908211631696,
						"acc_stderr,none": 0.029280908211631696,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139405,
						"acc_stderr,none": 0.03374402644139405,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.006567160417631986,
						"mcc_stderr,none": 0.03107043576947875
					},
					"copa": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.434593023255814,
						"likelihood_diff_stderr,none": 0.45642347923154075,
						"pct_stereotype,none": 0.5766249254621347,
						"pct_stereotype_stderr,none": 0.10036269156300633
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.5102862254025045,
						"likelihood_diff_stderr,none": 0.08206315760336408,
						"pct_stereotype,none": 0.6684555754323196,
						"pct_stereotype_stderr,none": 0.011499266322600413
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.6717032967032965,
						"likelihood_diff_stderr,none": 0.3639166371603216,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.193181818181818,
						"likelihood_diff_stderr,none": 1.9479274852014656,
						"pct_stereotype,none": 0.9090909090909091,
						"pct_stereotype_stderr,none": 0.0909090909090909
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.2384615384615385,
						"likelihood_diff_stderr,none": 0.5771185566262507,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.587890625,
						"likelihood_diff_stderr,none": 0.15515130808658495,
						"pct_stereotype,none": 0.66875,
						"pct_stereotype_stderr,none": 0.02635205567992741
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.4050925925925926,
						"likelihood_diff_stderr,none": 0.21176397070391634,
						"pct_stereotype,none": 0.6157407407407407,
						"pct_stereotype_stderr,none": 0.03317354514310742
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8541666666666665,
						"likelihood_diff_stderr,none": 0.36930833238121935,
						"pct_stereotype,none": 0.8194444444444444,
						"pct_stereotype_stderr,none": 0.04564949854152483
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.253198818897638,
						"likelihood_diff_stderr,none": 0.1412786468750111,
						"pct_stereotype,none": 0.5905511811023622,
						"pct_stereotype_stderr,none": 0.021838590402568178
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7725225225225225,
						"likelihood_diff_stderr,none": 0.30204224930965184,
						"pct_stereotype,none": 0.7837837837837838,
						"pct_stereotype_stderr,none": 0.039250566187156485
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.630376344086022,
						"likelihood_diff_stderr,none": 0.461140070922361,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.931578947368421,
						"likelihood_diff_stderr,none": 0.22144916347878182,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.360539654144305,
						"likelihood_diff_stderr,none": 0.08291477086254026,
						"pct_stereotype,none": 0.4841979725700656,
						"pct_stereotype_stderr,none": 0.012207198273771614
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.286111111111111,
						"likelihood_diff_stderr,none": 0.3184054644809773,
						"pct_stereotype,none": 0.43333333333333335,
						"pct_stereotype_stderr,none": 0.052526671187288064
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.0384615384615383,
						"likelihood_diff_stderr,none": 0.5834565944722334,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.84469696969697,
						"likelihood_diff_stderr,none": 0.5010701005583135,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.05966637484671758
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.883956386292835,
						"likelihood_diff_stderr,none": 0.16174308124943718,
						"pct_stereotype,none": 0.4797507788161994,
						"pct_stereotype_stderr,none": 0.027927918885132314
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.816205533596838,
						"likelihood_diff_stderr,none": 0.20452177916964934,
						"pct_stereotype,none": 0.32806324110671936,
						"pct_stereotype_stderr,none": 0.029576223219432405
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.9166666666666665,
						"likelihood_diff_stderr,none": 0.5237185344117438,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0369565217391306,
						"likelihood_diff_stderr,none": 0.15585106819240355,
						"pct_stereotype,none": 0.3630434782608696,
						"pct_stereotype_stderr,none": 0.022445426974212864
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.675,
						"likelihood_diff_stderr,none": 0.34988624608883356,
						"pct_stereotype,none": 0.7304347826086957,
						"pct_stereotype_stderr,none": 0.041559491385799514
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3543956043956045,
						"likelihood_diff_stderr,none": 0.2736122140591041,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.04198895203196222
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.5197704081632653,
						"likelihood_diff_stderr,none": 0.27351779400731535,
						"pct_stereotype,none": 0.6122448979591837,
						"pct_stereotype_stderr,none": 0.03489185364347385
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.08759842519685039,
						"exact_match_stderr,none": 0.0062731576336123905
					},
					"glue": {
						"acc,none": 0.4479546430605564,
						"acc_stderr,none": 0.07085107342766765,
						"alias": "glue",
						"f1,none": 0.3718310013716583,
						"f1_stderr,none": 0.0014146435259075004,
						"mcc,none": -0.006567160417631986,
						"mcc_stderr,none": 0.0009653719789053045
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.008339651250947688,
						"exact_match_stderr,get-answer": 0.002504942226860539
					},
					"hellaswag": {
						"acc,none": 0.5059749053973313,
						"acc_norm,none": 0.6725751842262497,
						"acc_norm_stderr,none": 0.004683146373232275,
						"acc_stderr,none": 0.004989425133377904,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09841178169217442,
						"acc_norm,none": 0.09841178169217442,
						"acc_norm_stderr,none": 0.06433910675649089,
						"acc_stderr,none": 0.06433910675649089,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.09,
						"acc_norm,none": 0.09,
						"acc_norm_stderr,none": 0.009054390204866439,
						"acc_stderr,none": 0.009054390204866439,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.076,
						"acc_norm,none": 0.076,
						"acc_norm_stderr,none": 0.00838416926679639,
						"acc_stderr,none": 0.00838416926679639,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264368,
						"acc_stderr,none": 0.012510816141264368,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264385,
						"acc_stderr,none": 0.012510816141264385,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17166666666666666,
						"acc_norm,none": 0.17166666666666666,
						"acc_norm_stderr,none": 0.015407498890924081,
						"acc_stderr,none": 0.015407498890924081,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.01,
						"acc_norm,none": 0.01,
						"acc_norm_stderr,none": 0.0031480009386767754,
						"acc_stderr,none": 0.0031480009386767754,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.005,
						"acc_norm,none": 0.005,
						"acc_norm_stderr,none": 0.002231586874844882,
						"acc_stderr,none": 0.002231586874844882,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.022,
						"acc_norm,none": 0.022,
						"acc_norm_stderr,none": 0.0046408552592747026,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452374,
						"acc_stderr,none": 0.005651808820452374,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611468,
						"acc_stderr,none": 0.004206387249611468,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178343,
						"acc_stderr,none": 0.004429403980178343,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.179,
						"acc_norm,none": 0.179,
						"acc_norm_stderr,none": 0.012128730605719116,
						"acc_stderr,none": 0.012128730605719116,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.005128089049275285,
						"acc_stderr,none": 0.005128089049275285,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528024,
						"acc_stderr,none": 0.010709373963528024,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.112,
						"acc_norm,none": 0.112,
						"acc_norm_stderr,none": 0.009977753031397236,
						"acc_stderr,none": 0.009977753031397236,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.083,
						"acc_norm,none": 0.083,
						"acc_norm_stderr,none": 0.008728527206074796,
						"acc_stderr,none": 0.008728527206074796,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.069,
						"acc_norm,none": 0.069,
						"acc_norm_stderr,none": 0.008018934050315145,
						"acc_stderr,none": 0.008018934050315145,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306492,
						"acc_stderr,none": 0.004536472151306492,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.029,
						"acc_norm,none": 0.029,
						"acc_norm_stderr,none": 0.005309160685756978,
						"acc_stderr,none": 0.005309160685756978,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.071,
						"acc_norm,none": 0.071,
						"acc_norm_stderr,none": 0.00812557844248791,
						"acc_stderr,none": 0.00812557844248791,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.012285191326386708,
						"acc_stderr,none": 0.012285191326386708,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.13833333333333334,
						"acc_norm,none": 0.13833333333333334,
						"acc_norm_stderr,none": 0.014106512439024638,
						"acc_stderr,none": 0.014106512439024638,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.101,
						"acc_norm,none": 0.101,
						"acc_norm_stderr,none": 0.009533618929340995,
						"acc_stderr,none": 0.009533618929340995,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.106,
						"acc_norm,none": 0.106,
						"acc_norm_stderr,none": 0.009739551265785134,
						"acc_stderr,none": 0.009739551265785134,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.062,
						"acc_norm,none": 0.062,
						"acc_norm_stderr,none": 0.007629823996280313,
						"acc_stderr,none": 0.007629823996280313,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.078,
						"acc_norm,none": 0.078,
						"acc_norm_stderr,none": 0.008484573530118585,
						"acc_stderr,none": 0.008484573530118585,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.01339490288966001,
						"acc_stderr,none": 0.01339490288966001,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.04,
						"acc_norm,none": 0.04,
						"acc_norm_stderr,none": 0.0061998740663370576,
						"acc_stderr,none": 0.0061998740663370576,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.010978183844357801,
						"acc_stderr,none": 0.010978183844357801,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.025,
						"acc_norm,none": 0.025,
						"acc_norm_stderr,none": 0.004939574819698464,
						"acc_stderr,none": 0.004939574819698464,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.46744135058101294,
						"acc_norm,none": 0.43,
						"acc_norm_stderr,none": 0.0004911823647294574,
						"acc_stderr,none": 0.04926168838119018,
						"alias": "kobest",
						"f1,none": 0.36610912071793955,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.489,
						"acc_stderr,none": 0.015815471195292693,
						"alias": " - kobest_copa",
						"f1,none": 0.4882215850308319,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.29,
						"acc_norm,none": 0.43,
						"acc_norm_stderr,none": 0.022162634426652835,
						"acc_stderr,none": 0.020313179231745193,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.28602328046703235,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.44836272040302266,
						"acc_stderr,none": 0.024991594109841586,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3928943013358099,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6523384436250728,
						"acc_stderr,none": 0.01252307762641048,
						"alias": "lambada",
						"perplexity,none": 4.7331850055618805,
						"perplexity_stderr,none": 0.26420318619980676
					},
					"lambada_cloze": {
						"acc,none": 0.03755094119930138,
						"acc_stderr,none": 0.004657681211612462,
						"alias": "lambada_cloze",
						"perplexity,none": 406.28564830227816,
						"perplexity_stderr,none": 83.71095243163587
					},
					"lambada_multilingual": {
						"acc,none": 0.3881622355909179,
						"acc_stderr,none": 0.08375300012280831,
						"alias": "lambada_multilingual",
						"perplexity,none": 94.24299539565746,
						"perplexity_stderr,none": 30.028815681145435
					},
					"lambada_openai": {
						"acc,none": 0.6737822627595575,
						"acc_stderr,none": 0.006531691215150968,
						"alias": " - lambada_openai",
						"perplexity,none": 4.250598313839249,
						"perplexity_stderr,none": 0.09252673510748143
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.029885503590141665,
						"acc_stderr,none": 0.002372213970074895,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 241.54217767193785,
						"perplexity_stderr,none": 7.95456524250069
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.28041917329710847,
						"acc_stderr,none": 0.006258290472632361,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 140.0133340076818,
						"perplexity_stderr,none": 8.652865280352644
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6741703861828061,
						"acc_stderr,none": 0.006529684317476097,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.25205289918128,
						"perplexity_stderr,none": 0.09260941569288471
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3060353192315156,
						"acc_stderr,none": 0.006420465728110188,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 124.4520996487743,
						"perplexity_stderr,none": 7.245651841086687
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.36095478362119154,
						"acc_stderr,none": 0.006691203954844635,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 76.7582261318957,
						"perplexity_stderr,none": 4.452752530346086
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.3192315156219678,
						"acc_stderr,none": 0.0064947834277386774,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 125.73926429075424,
						"perplexity_stderr,none": 7.919251868167341
					},
					"lambada_standard": {
						"acc,none": 0.6310886862022123,
						"acc_stderr,none": 0.006722305683426825,
						"alias": " - lambada_standard",
						"perplexity,none": 5.215321656840184,
						"perplexity_stderr,none": 0.12168536800627908
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.04521637880846109,
						"acc_stderr,none": 0.0028947591959917196,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 571.0291189326185,
						"perplexity_stderr,none": 19.501093191433405
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.24236641221374045,
						"exact_match_stderr,get-answer": 0.010811295412400642
					},
					"logiqa": {
						"acc,none": 0.23963133640552994,
						"acc_norm,none": 0.2887864823348694,
						"acc_norm_stderr,none": 0.017775906336539235,
						"acc_stderr,none": 0.016742766935101436,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22646310432569974,
						"acc_norm,none": 0.26590330788804073,
						"acc_norm_stderr,none": 0.011146805188415496,
						"acc_stderr,none": 0.010559689596278005,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2442211055276382,
						"acc_norm,none": 0.2425460636515913,
						"acc_norm_stderr,none": 0.007846497115068572,
						"acc_stderr,none": 0.007864834115502725,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3487608557509002,
						"acc_stderr,none": 0.004904843072711637,
						"alias": "mc_taco",
						"f1,none": 0.5057471264367817,
						"f1_stderr,none": 0.005487849026142363
					},
					"medmcqa": {
						"acc,none": 0.2318909873296677,
						"acc_norm,none": 0.2318909873296677,
						"acc_norm_stderr,none": 0.006526214608958166,
						"acc_stderr,none": 0.006526214608958166,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2694422623723488,
						"acc_norm,none": 0.2694422623723488,
						"acc_norm_stderr,none": 0.01243989099896177,
						"acc_stderr,none": 0.01243989099896177,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2503916820965675,
						"acc_stderr,none": 0.03702168702154798,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.03915450630414251,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.21509433962264152,
						"acc_stderr,none": 0.025288394502891363,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.24305555555555555,
						"acc_stderr,none": 0.03586879280080341,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.04093601807403326,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.034765996075164785,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171452,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.225531914893617,
						"acc_stderr,none": 0.027321078417387533,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748142,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135303,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_stderr,none": 0.021935878081184763,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.1984126984126984,
						"acc_stderr,none": 0.03567016675276862,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.22258064516129034,
						"acc_stderr,none": 0.02366421667164253,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.031947400722655395,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23636363636363636,
						"acc_stderr,none": 0.03317505930009179,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.21212121212121213,
						"acc_stderr,none": 0.02912652283458682,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.02977866303775296,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.21025641025641026,
						"acc_stderr,none": 0.020660597485026938,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712173,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.19747899159663865,
						"acc_stderr,none": 0.02585916412205146,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.0347918557259966,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23302752293577983,
						"acc_stderr,none": 0.018125669180861503,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093936,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.028867431449849316,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.25738396624472576,
						"acc_stderr,none": 0.028458820991460302,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2600896860986547,
						"acc_stderr,none": 0.029442495585857487,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2366412213740458,
						"acc_stderr,none": 0.03727673575596919,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2563230605738576,
						"acc_stderr,none": 0.033748847743544266,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.043913262867240704,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.040191074725573483,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.294478527607362,
						"acc_stderr,none": 0.03581165790474082,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467761,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.20388349514563106,
						"acc_stderr,none": 0.039891398595317706,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.27350427350427353,
						"acc_stderr,none": 0.02920254015343118,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.046482319871173156,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2707535121328225,
						"acc_stderr,none": 0.01588988836256049,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2658959537572254,
						"acc_stderr,none": 0.02378620325550829,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961464,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.24836601307189543,
						"acc_stderr,none": 0.02473998135511359,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25490827164467333,
						"acc_stderr,none": 0.03891388181415792,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.21221864951768488,
						"acc_stderr,none": 0.023222756797435136,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.27469135802469136,
						"acc_stderr,none": 0.024836057868294677,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.026577860943307854,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2692307692307692,
						"acc_stderr,none": 0.011328734403140313,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.16176470588235295,
						"acc_stderr,none": 0.022368672562886757,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27450980392156865,
						"acc_stderr,none": 0.0180540274588152,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2818181818181818,
						"acc_stderr,none": 0.043091187099464585,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2530612244897959,
						"acc_stderr,none": 0.027833023871399694,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2365940851478713,
						"acc_stderr,none": 0.03257627485114301,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22885572139303484,
						"acc_stderr,none": 0.029705284056772436,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2505550269584523,
						"acc_stderr,none": 0.042181165505982395,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322674,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.27710843373493976,
						"acc_stderr,none": 0.034843315926805875,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.033773102522091945,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3303107488537952,
						"acc_stderr,none": 0.004747609504828331,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3340113913751017,
						"acc_stderr,none": 0.004756803283728464,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6151960784313726,
						"acc_stderr,none": 0.0241173351823918,
						"alias": "mrpc",
						"f1,none": 0.7535321821036107,
						"f1_stderr,none": 0.01904278461745188
					},
					"multimedqa": {
						"acc,none": 0.265152590489709,
						"acc_norm,none": 0.24448362505753568,
						"acc_norm_stderr,none": 0.00011512370899131588,
						"acc_stderr,none": 0.0872894702734837,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5711633663366337,
						"acc_stderr,none": 0.007108690423137718,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6908389782663392,
						"mrr_stderr,none": 0.010280047322734015,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4345372460496614,
						"r@2_stderr,none": 0.016662642265942003
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6429646369364138,
						"mrr_stderr,none": 0.010375180918330791,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4706546275395034,
						"r@2_stderr,none": 0.016778343895001435
					},
					"openbookqa": {
						"acc,none": 0.27,
						"acc_norm,none": 0.372,
						"acc_norm_stderr,none": 0.0216371979857224,
						"acc_stderr,none": 0.019874354831287473,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.417,
						"acc_stderr,none": 0.011027978425535509,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.010867246593514927,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4455,
						"acc_stderr,none": 0.011116504096687393,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884877,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.011124809242874423,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957058,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4877142857142857,
						"acc_stderr,none": 0.049786073170358214,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7611534276387377,
						"acc_norm,none": 0.7622415669205659,
						"acc_norm_stderr,none": 0.009932525779525485,
						"acc_stderr,none": 0.009948120385337496,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26457087959009395,
						"acc_norm,none": 0.29253842869342445,
						"acc_norm_stderr,none": 0.003323655792434637,
						"acc_stderr,none": 0.0032226607010812924,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.02198396209008634,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7321891499583415,
						"acc_norm,none": 0.52267854559101,
						"acc_norm_stderr,none": 0.0044258058237959235,
						"acc_stderr,none": 0.13169555347481052,
						"alias": "pythia",
						"bits_per_byte,none": 0.6767232860854291,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.598505032488132,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.250598313839249,
						"perplexity_stderr,none": 0.09252673510748143,
						"word_perplexity,none": 12.284305307357904,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4024822695035461,
						"acc_norm,none": 0.4627659574468085,
						"acc_norm_stderr,none": 0.0499272283431575,
						"acc_stderr,none": 0.040562881423311964,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.04531634835874828,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.48125,
						"acc_norm_stderr,none": 0.03962468875738331,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3732394366197183,
						"acc_norm,none": 0.40492957746478875,
						"acc_norm_stderr,none": 0.02917969275220336,
						"acc_stderr,none": 0.02875089548898921,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5090609555189456,
						"acc_stderr,none": 0.006764299567764281,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4855058125154588,
						"acc_stderr,none": 0.0024856555926508106,
						"alias": "qqp",
						"f1,none": 0.3684610013055227,
						"f1_stderr,none": 0.0033948792712426507
					},
					"race": {
						"acc,none": 0.3799043062200957,
						"acc_stderr,none": 0.01502160080493565,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5523465703971119,
						"acc_stderr,none": 0.02993107036293953,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.898,
						"acc_norm,none": 0.852,
						"acc_norm_stderr,none": 0.011234866364235258,
						"acc_stderr,none": 0.009575368801653918,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5487364620938628,
						"acc_stderr,none": 0.029953149241808946,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7706422018348624,
						"acc_stderr,none": 0.014245381090651236,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5441367589723083,
						"acc_norm,none": 0.7406777966610018,
						"acc_norm_stderr,none": 0.003098598002697787,
						"acc_stderr,none": 0.003521292014446527,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5515290672523377,
						"acc_stderr,none": 0.022361709180050694,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5667067307692307,
						"acc_stderr,none": 0.004959519717464513,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5944055944055944,
						"acc_stderr,none": 0.004943298848078168,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.49519607843137253,
						"acc_stderr,none": 0.004950751896566983,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3079820170872586,
						"acc_stderr,none": 0.04306092106663105,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.29008567931456547,
						"bleu_acc_stderr,none": 0.0002523725220234942,
						"bleu_diff,none": -8.57088064642411,
						"bleu_diff_stderr,none": 0.6576073444054554,
						"bleu_max,none": 23.824101893552733,
						"bleu_max_stderr,none": 0.5554250697572132,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.00024251335977072588,
						"rouge1_diff,none": -10.858328818075696,
						"rouge1_diff_stderr,none": 0.8205581843037311,
						"rouge1_max,none": 48.84229807754479,
						"rouge1_max_stderr,none": 0.7382658420689897,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.00019929461127346601,
						"rouge2_diff,none": -13.191388596437479,
						"rouge2_diff_stderr,none": 1.0841061883387555,
						"rouge2_max,none": 31.615412268381807,
						"rouge2_max_stderr,none": 0.9677786270619678,
						"rougeL_acc,none": 0.26560587515299877,
						"rougeL_acc_stderr,none": 0.00023904337529069867,
						"rougeL_diff,none": -11.114783151246234,
						"rougeL_diff_stderr,none": 0.8420572608514878,
						"rougeL_max,none": 45.927717086588885,
						"rougeL_max_stderr,none": 0.7574028486713349
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.29008567931456547,
						"bleu_acc_stderr,none": 0.01588623687420952,
						"bleu_diff,none": -8.57088064642411,
						"bleu_diff_stderr,none": 0.8109299257059487,
						"bleu_max,none": 23.824101893552733,
						"bleu_max_stderr,none": 0.7452684548249799,
						"rouge1_acc,none": 0.2717258261933905,
						"rouge1_acc_stderr,none": 0.01557284045287583,
						"rouge1_diff,none": -10.858328818075696,
						"rouge1_diff_stderr,none": 0.905846667104169,
						"rouge1_max,none": 48.84229807754479,
						"rouge1_max_stderr,none": 0.859223976660911,
						"rouge2_acc,none": 0.204406364749082,
						"rouge2_acc_stderr,none": 0.014117174337432616,
						"rouge2_diff,none": -13.191388596437479,
						"rouge2_diff_stderr,none": 1.0412042010762133,
						"rouge2_max,none": 31.615412268381807,
						"rouge2_max_stderr,none": 0.9837574025449404,
						"rougeL_acc,none": 0.26560587515299877,
						"rougeL_acc_stderr,none": 0.015461027627253586,
						"rougeL_diff,none": -11.114783151246234,
						"rougeL_diff_stderr,none": 0.9176367804591792,
						"rougeL_max,none": 45.927717086588885,
						"rougeL_max_stderr,none": 0.8702889455067983
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2215422276621787,
						"acc_stderr,none": 0.01453786760130114,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35120191179979854,
						"acc_stderr,none": 0.013571457775934787,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.08759842519685039,
						"exact_match_stderr,none": 0.0062731576336123905
					},
					"wic": {
						"acc,none": 0.4890282131661442,
						"acc_stderr,none": 0.01980595108597941,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6767232860854291,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.598505032488132,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 12.284305307357904,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6471981057616417,
						"acc_stderr,none": 0.013429728101788954,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4326923076923077,
						"acc_stderr,none": 0.04881803687006195,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8131868131868132,
						"acc_stderr,none": 0.023632761722644557,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5229090909090909,
						"acc_stderr,none": 0.027215770707865124,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.022378596989230785,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668086,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407053,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.022296238348407046,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.022357273881016403,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3784738955823293,
						"acc_stderr,none": 0.054985199168171614,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778591,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3550200803212851,
						"acc_stderr,none": 0.009591512730974288,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42811244979919677,
						"acc_stderr,none": 0.009917947421067469,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5602409638554217,
						"acc_stderr,none": 0.009949067285169363,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4493975903614458,
						"acc_stderr,none": 0.009970615649588137,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4506024096385542,
						"acc_stderr,none": 0.009973042774811681,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3417670682730924,
						"acc_stderr,none": 0.009506977398287632,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.37670682730923694,
						"acc_stderr,none": 0.009712599529552994,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516887,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3317269076305221,
						"acc_stderr,none": 0.00943745490032912,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3610441767068273,
						"acc_stderr,none": 0.009627269742195714,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177131,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3449799196787149,
						"acc_stderr,none": 0.009528219800053311,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3305220883534137,
						"acc_stderr,none": 0.00942878910928982,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5250586607304013,
						"acc_stderr,none": 0.06681237456634356,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4870946393117141,
						"acc_stderr,none": 0.01286283860572848,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7352746525479815,
						"acc_stderr,none": 0.01135361301038959,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5837193911317009,
						"acc_stderr,none": 0.012685473350967527,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.01286610802121821,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5095962938451357,
						"acc_stderr,none": 0.012864755260408957,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4877564526803441,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.46591661151555264,
						"acc_stderr,none": 0.012837195610619431,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4930509596293845,
						"acc_stderr,none": 0.012865882570960722,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5043017868960953,
						"acc_stderr,none": 0.012866649085718848,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5268034414295168,
						"acc_stderr,none": 0.012848623899505763,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335882,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7104967408406383,
						"acc_stderr,none": 0.08278874226692942,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8520430107526882,
						"acc_stderr,none": 0.007365130391822567,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.05221260262032129,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5370177267987487,
						"acc_stderr,none": 0.016109933135138657,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6615969581749049,
						"acc_stderr,none": 0.029232316577302644,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.0281538589456489,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5317460317460317,
						"acc_stderr,none": 0.02224891667424951,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "facebook/opt-6.7b"
	},
	"google/flan-t5-base": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.3246899661781285,
						"acc_norm,none": 0.34047350620067646,
						"acc_norm_stderr,none": 0.007892493489692284,
						"acc_stderr,none": 0.007740747326564025,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.008318832851265745,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00045,
						"acc_stderr,none": 0.00014989991658602244,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8045223880597014,
						"acc_stderr,none": 0.0013903719177580135,
						"alias": "blimp"
					},
					"ceval-valid": {
						"acc,none": 0.23551263001485884,
						"acc_norm,none": 0.23551263001485884,
						"acc_norm_stderr,none": 0.01160767270232024,
						"acc_stderr,none": 0.01160767270232024,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2524607149024348,
						"acc_norm,none": 0.2524607149024348,
						"acc_norm_stderr,none": 0.00404172163002656,
						"acc_stderr,none": 0.00404172163002656,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.472830948121646,
						"likelihood_diff_stderr,none": 0.05713403988641092,
						"pct_stereotype,none": 0.4992546213476446,
						"pct_stereotype_stderr,none": 0.0060375937570211775
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.003937007874015748,
						"exact_match_stderr,none": 0.0013895416930409094
					},
					"kobest": {
						"acc,none": 0.4757728568296426,
						"acc_stderr,none": 0.007301352107783242,
						"alias": "kobest",
						"f1,none": 0.35413473412910773,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.2786726178924898,
						"acc_stderr,none": 0.004394885803211236,
						"alias": "lambada",
						"perplexity,none": 1124.8999970233383,
						"perplexity_stderr,none": 83.20811103765554
					},
					"lambada_cloze": {
						"acc,none": 0.3291286629148069,
						"acc_stderr,none": 0.004628468145180275,
						"alias": "lambada_cloze",
						"perplexity,none": 336.3842993427113,
						"perplexity_stderr,none": 18.87420451903014
					},
					"lambada_multilingual": {
						"acc,none": 0.07149233456239085,
						"acc_stderr,none": 0.0015229333220403854,
						"alias": "lambada_multilingual",
						"perplexity,none": 12450564.061607182,
						"perplexity_stderr,none": 1249467.7816388514
					},
					"mmlu": {
						"acc,none": 0.23543654750035609,
						"acc_stderr,none": 0.0035728818178558295,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2514346439957492,
						"acc_stderr,none": 0.0063197577518516014,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.24654007080785323,
						"acc_stderr,none": 0.0077079755124050495,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22326941826454338,
						"acc_stderr,none": 0.007503203481714887,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.21249603552172533,
						"acc_stderr,none": 0.00727642827845504,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2942512420156139,
						"acc_stderr,none": 0.005412562220361505,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5186428571428572,
						"acc_stderr,none": 0.004208100724663781,
						"alias": "pawsx"
					},
					"qa4mre": {
						"acc,none": 0.2801418439716312,
						"acc_norm,none": 0.3280141843971631,
						"acc_norm_stderr,none": 0.01980781501162305,
						"acc_stderr,none": 0.01889511788229077,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5654720308808359,
						"acc_stderr,none": 0.0028197305099225186,
						"alias": "sycophancy"
					},
					"xcopa": {
						"acc,none": 0.5109090909090909,
						"acc_stderr,none": 0.006742573432585962,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.34471218206157966,
						"acc_stderr,none": 0.002455367656551436,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.4840863967270321,
						"acc_stderr,none": 0.0038731184107945655,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.5088783996403686,
						"acc_stderr,none": 0.007498296937388405,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.3246899661781285,
						"acc_norm,none": 0.34047350620067646,
						"acc_norm_stderr,none": 0.007892493489692284,
						"acc_stderr,none": 0.007740747326564025,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.330625,
						"acc_stderr,none": 0.008318832851265745,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356953,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.324,
						"acc_stderr,none": 0.014806864733738857,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3258333333333333,
						"acc_stderr,none": 0.013535422043417454,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.20733788395904437,
						"acc_norm,none": 0.25426621160409557,
						"acc_norm_stderr,none": 0.012724999945157741,
						"acc_stderr,none": 0.011846905782971357,
						"alias": " - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.38257575757575757,
						"acc_norm,none": 0.382996632996633,
						"acc_norm_stderr,none": 0.009974920384536482,
						"acc_stderr,none": 0.009972837790531477,
						"alias": " - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00045,
						"acc_stderr,none": 0.00014989991658602244,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430694776,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.000999249343069498,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000158,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8045223880597014,
						"acc_stderr,none": 0.0013903719177580135,
						"alias": "blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847164,
						"alias": " - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987277,
						"alias": " - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": " - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938062,
						"alias": " - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936711,
						"alias": " - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.015512467135715077,
						"alias": " - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103766,
						"alias": " - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256569,
						"alias": " - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611488,
						"alias": " - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.01009340759490461,
						"alias": " - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295436,
						"alias": " - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.0072129762946392395,
						"alias": " - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653916,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491122,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756966,
						"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259207,
						"alias": " - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.01333479721693644,
						"alias": " - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145158,
						"alias": " - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386695,
						"alias": " - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890136,
						"alias": " - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702293,
						"alias": " - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998505,
						"alias": " - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.236,
						"acc_stderr,none": 0.013434451402438683,
						"alias": " - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746839,
						"alias": " - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220475,
						"alias": " - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.641,
						"acc_stderr,none": 0.015177264224798596,
						"alias": " - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.781,
						"acc_stderr,none": 0.01308473195026199,
						"alias": " - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832008,
						"alias": " - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042956,
						"alias": " - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248792,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.00909954953840023,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.015819299929208316,
						"alias": " - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477828,
						"alias": " - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.456,
						"acc_stderr,none": 0.015757928553979172,
						"alias": " - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.015645087688113814,
						"alias": " - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.014658474370509005,
						"alias": " - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.00745483565040673,
						"alias": " - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938574,
						"alias": " - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.0103548647129367,
						"alias": " - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523736,
						"alias": " - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.565,
						"acc_stderr,none": 0.015685057252717204,
						"alias": " - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": " - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426109,
						"alias": " - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946567975,
						"alias": " - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612032,
						"alias": " - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.015466551464829345,
						"alias": " - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.015746235865880677,
						"alias": " - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.0072129762946392395,
						"alias": " - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.01028132801274738,
						"alias": " - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": " - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.01456664639466438,
						"alias": " - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.407,
						"acc_stderr,none": 0.015543249100255542,
						"alias": " - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.013063179040595285,
						"alias": " - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559924,
						"alias": " - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.663,
						"acc_stderr,none": 0.01495508791865361,
						"alias": " - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.01084535023047299,
						"alias": " - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096933,
						"alias": " - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617322,
						"alias": " - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": " - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380696,
						"alias": " - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081344,
						"alias": " - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154617,
						"alias": " - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5314984709480123,
						"acc_stderr,none": 0.00872768484861531,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.5535714285714286,
						"acc_stderr,none": 0.06703189227942397,
						"alias": "cb",
						"f1,none": 0.377003869541183,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.23551263001485884,
						"acc_norm,none": 0.23551263001485884,
						"acc_norm_stderr,none": 0.01160767270232024,
						"acc_stderr,none": 0.01160767270232024,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757576,
						"acc_stderr,none": 0.07575757575757576,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502246,
						"acc_stderr,none": 0.07401656182502246,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.10942433098048308,
						"acc_stderr,none": 0.10942433098048308,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.06007385040937022,
						"acc_stderr,none": 0.06007385040937022,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453994,
						"acc_stderr,none": 0.06957698714453994,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2524607149024348,
						"acc_norm,none": 0.2524607149024348,
						"acc_norm_stderr,none": 0.00404172163002656,
						"acc_stderr,none": 0.00404172163002656,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.033346454086653377,
						"acc_stderr,none": 0.033346454086653377,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885417,
						"acc_stderr,none": 0.03453131801885417,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2583732057416268,
						"acc_norm,none": 0.2583732057416268,
						"acc_norm_stderr,none": 0.030351822614803438,
						"acc_stderr,none": 0.030351822614803438,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847835,
						"acc_stderr,none": 0.03915345408847835,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27205882352941174,
						"acc_norm,none": 0.27205882352941174,
						"acc_norm_stderr,none": 0.03830122520709327,
						"acc_stderr,none": 0.03830122520709327,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.04362839933570101,
						"acc_stderr,none": 0.04362839933570101,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25696594427244585,
						"acc_norm,none": 0.25696594427244585,
						"acc_norm_stderr,none": 0.02435085467633012,
						"acc_stderr,none": 0.02435085467633012,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083292,
						"acc_stderr,none": 0.03132179803083292,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328524,
						"acc_stderr,none": 0.03383195081328524,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.0276521531441593,
						"acc_stderr,none": 0.0276521531441593,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382031,
						"acc_stderr,none": 0.04522350077382031,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.03990657150993187,
						"acc_stderr,none": 0.03990657150993187,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.04302548773959011,
						"acc_stderr,none": 0.04302548773959011,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.025546583236733526,
						"acc_stderr,none": 0.025546583236733526,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.03508771929824562,
						"acc_stderr,none": 0.03508771929824562,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606757,
						"acc_stderr,none": 0.03558926157606757,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.03729198658164234,
						"acc_stderr,none": 0.03729198658164234,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27672955974842767,
						"acc_norm,none": 0.27672955974842767,
						"acc_norm_stderr,none": 0.03559177035707935,
						"acc_stderr,none": 0.03559177035707935,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.22085889570552147,
						"acc_norm,none": 0.22085889570552147,
						"acc_norm_stderr,none": 0.03259177392742178,
						"acc_stderr,none": 0.03259177392742178,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23255813953488372,
						"acc_norm,none": 0.23255813953488372,
						"acc_norm_stderr,none": 0.0323065408320345,
						"acc_stderr,none": 0.0323065408320345,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02688368747322084,
						"acc_stderr,none": 0.02688368747322084,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.031156269519646847,
						"acc_stderr,none": 0.031156269519646847,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.25210084033613445,
						"acc_norm,none": 0.25210084033613445,
						"acc_norm_stderr,none": 0.028205545033277723,
						"acc_stderr,none": 0.028205545033277723,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.02975452853823325,
						"acc_stderr,none": 0.02975452853823325,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.0357250214181557,
						"acc_stderr,none": 0.0357250214181557,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019615,
						"acc_stderr,none": 0.033341501981019615,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.0366143336041072,
						"acc_stderr,none": 0.0366143336041072,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.22033898305084745,
						"acc_norm,none": 0.22033898305084745,
						"acc_norm_stderr,none": 0.03831824849223319,
						"acc_stderr,none": 0.03831824849223319,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2937062937062937,
						"acc_norm,none": 0.2937062937062937,
						"acc_norm_stderr,none": 0.03822127078536156,
						"acc_stderr,none": 0.03822127078536156,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604673,
						"acc_stderr,none": 0.03893259610604673,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.03209281645145388,
						"acc_stderr,none": 0.03209281645145388,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707746,
						"acc_stderr,none": 0.03285260554707746,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.021801329069745176,
						"acc_stderr,none": 0.021801329069745176,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.029576535293164487,
						"acc_stderr,none": 0.029576535293164487,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208567,
						"acc_stderr,none": 0.04119323030208567,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.039147319035957334,
						"acc_stderr,none": 0.039147319035957334,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.22380952380952382,
						"acc_norm,none": 0.22380952380952382,
						"acc_norm_stderr,none": 0.028830375135239766,
						"acc_stderr,none": 0.028830375135239766,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2611111111111111,
						"acc_norm,none": 0.2611111111111111,
						"acc_norm_stderr,none": 0.0328303663396684,
						"acc_stderr,none": 0.0328303663396684,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03174603174603176,
						"acc_stderr,none": 0.03174603174603176,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.21551724137931033,
						"acc_norm,none": 0.21551724137931033,
						"acc_norm_stderr,none": 0.03834279707285459,
						"acc_stderr,none": 0.03834279707285459,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.33793103448275863,
						"acc_norm,none": 0.33793103448275863,
						"acc_norm_stderr,none": 0.039417076320648906,
						"acc_stderr,none": 0.039417076320648906,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.0303239217431561,
						"acc_stderr,none": 0.0303239217431561,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.23222748815165878,
						"acc_norm,none": 0.23222748815165878,
						"acc_norm_stderr,none": 0.029138248623581754,
						"acc_stderr,none": 0.029138248623581754,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.23138297872340424,
						"acc_norm,none": 0.23138297872340424,
						"acc_norm_stderr,none": 0.021777351897815805,
						"acc_stderr,none": 0.021777351897815805,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.028490144114909487,
						"acc_stderr,none": 0.028490144114909487,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.23563218390804597,
						"acc_norm,none": 0.23563218390804597,
						"acc_norm_stderr,none": 0.03226602373932446,
						"acc_stderr,none": 0.03226602373932446,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24778761061946902,
						"acc_norm,none": 0.24778761061946902,
						"acc_norm_stderr,none": 0.02878185467292146,
						"acc_stderr,none": 0.02878185467292146,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23030303030303031,
						"acc_norm,none": 0.23030303030303031,
						"acc_norm_stderr,none": 0.03287666758603488,
						"acc_stderr,none": 0.03287666758603488,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.0288734720923751,
						"acc_stderr,none": 0.0288734720923751,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2360248447204969,
						"acc_norm,none": 0.2360248447204969,
						"acc_norm_stderr,none": 0.03357055232967969,
						"acc_stderr,none": 0.03357055232967969,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": "cola",
						"mcc,none": 0.02416411533617769,
						"mcc_stderr,none": 0.030867197162684865
					},
					"copa": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.472830948121646,
						"likelihood_diff_stderr,none": 0.05713403988641092,
						"pct_stereotype,none": 0.4992546213476446,
						"pct_stereotype_stderr,none": 0.0060375937570211775
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.7554412641621946,
						"likelihood_diff_stderr,none": 0.09600491679272966,
						"pct_stereotype,none": 0.5271317829457365,
						"pct_stereotype_stderr,none": 0.012195304721568219
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.151098901098901,
						"likelihood_diff_stderr,none": 0.2670006814069609,
						"pct_stereotype,none": 0.5494505494505495,
						"pct_stereotype_stderr,none": 0.052446231001012256
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 8.545454545454545,
						"likelihood_diff_stderr,none": 2.1801840471251146,
						"pct_stereotype,none": 0.45454545454545453,
						"pct_stereotype_stderr,none": 0.15745916432444335
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.142307692307693,
						"likelihood_diff_stderr,none": 0.613033745825391,
						"pct_stereotype,none": 0.6307692307692307,
						"pct_stereotype_stderr,none": 0.060324565928300454
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.96953125,
						"likelihood_diff_stderr,none": 0.19656640502366926,
						"pct_stereotype,none": 0.53125,
						"pct_stereotype_stderr,none": 0.0279398950447155
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.462384259259259,
						"likelihood_diff_stderr,none": 0.23041958400321652,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.03388857118502325
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.6475694444444446,
						"likelihood_diff_stderr,none": 0.3371745952448907,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.6953740157480315,
						"likelihood_diff_stderr,none": 0.17265912546030981,
						"pct_stereotype,none": 0.4448818897637795,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.231981981981982,
						"likelihood_diff_stderr,none": 0.5027255943918828,
						"pct_stereotype,none": 0.6486486486486487,
						"pct_stereotype_stderr,none": 0.04551758693625317
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.684139784946237,
						"likelihood_diff_stderr,none": 0.5240657922018894,
						"pct_stereotype,none": 0.7849462365591398,
						"pct_stereotype_stderr,none": 0.042835078355547535
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.056578947368421,
						"likelihood_diff_stderr,none": 0.24315063215401578,
						"pct_stereotype,none": 0.5789473684210527,
						"pct_stereotype_stderr,none": 0.035913425664502355
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 5.189326177698271,
						"likelihood_diff_stderr,none": 0.13258359890732652,
						"pct_stereotype,none": 0.47048300536672627,
						"pct_stereotype_stderr,none": 0.012191998897997571
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.813888888888889,
						"likelihood_diff_stderr,none": 0.4202386425773064,
						"pct_stereotype,none": 0.32222222222222224,
						"pct_stereotype_stderr,none": 0.04953662380574454
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.25,
						"likelihood_diff_stderr,none": 1.0801234497346435,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 6.954545454545454,
						"likelihood_diff_stderr,none": 0.7132999964483727,
						"pct_stereotype,none": 0.5,
						"pct_stereotype_stderr,none": 0.06201736729460421
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.94898753894081,
						"likelihood_diff_stderr,none": 0.2012345148922979,
						"pct_stereotype,none": 0.5233644859813084,
						"pct_stereotype_stderr,none": 0.027920316348204993
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 6.923913043478261,
						"likelihood_diff_stderr,none": 0.3764618041224008,
						"pct_stereotype,none": 0.30039525691699603,
						"pct_stereotype_stderr,none": 0.028878367428103884
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 6.930555555555555,
						"likelihood_diff_stderr,none": 0.8718676078071622,
						"pct_stereotype,none": 0.4861111111111111,
						"pct_stereotype_stderr,none": 0.05931618532716555
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.139130434782609,
						"likelihood_diff_stderr,none": 0.23807741315904454,
						"pct_stereotype,none": 0.41956521739130437,
						"pct_stereotype_stderr,none": 0.023034039684727163
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 5.078260869565217,
						"likelihood_diff_stderr,none": 0.4867168181143798,
						"pct_stereotype,none": 0.6347826086956522,
						"pct_stereotype_stderr,none": 0.045095770252620675
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 8.35989010989011,
						"likelihood_diff_stderr,none": 0.8515876932984785,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904758
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.074617346938775,
						"likelihood_diff_stderr,none": 0.3361645272558743,
						"pct_stereotype,none": 0.5867346938775511,
						"pct_stereotype_stderr,none": 0.03526290219436087
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.003937007874015748,
						"exact_match_stderr,none": 0.0013895416930409094
					},
					"hellaswag": {
						"acc,none": 0.3027285401314479,
						"acc_norm,none": 0.33210515833499304,
						"acc_norm_stderr,none": 0.004700059671374634,
						"acc_stderr,none": 0.004584997935360437,
						"alias": "hellaswag"
					},
					"kobest": {
						"acc,none": 0.4757728568296426,
						"acc_stderr,none": 0.007301352107783242,
						"alias": "kobest",
						"f1,none": 0.35413473412910773,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.49928774928774927,
						"acc_stderr,none": 0.01334875395475671,
						"alias": " - kobest_boolq",
						"f1,none": 0.33923219312662556,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.505,
						"acc_stderr,none": 0.01581850894443666,
						"alias": " - kobest_copa",
						"f1,none": 0.4598894246349343,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.246,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.01889619359195206,
						"acc_stderr,none": 0.019279819056352555,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.23512452092882075,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.49370277078085645,
						"acc_stderr,none": 0.02512395255890725,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3305227655986509,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.5119047619047619,
						"acc_stderr,none": 0.01408750246460405,
						"alias": " - kobest_wic",
						"f1,none": 0.3414740477548166,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.2786726178924898,
						"acc_stderr,none": 0.004394885803211236,
						"alias": "lambada",
						"perplexity,none": 1124.8999970233383,
						"perplexity_stderr,none": 83.20811103765554
					},
					"lambada_cloze": {
						"acc,none": 0.3291286629148069,
						"acc_stderr,none": 0.004628468145180275,
						"alias": "lambada_cloze",
						"perplexity,none": 336.3842993427113,
						"perplexity_stderr,none": 18.87420451903014
					},
					"lambada_multilingual": {
						"acc,none": 0.07149233456239085,
						"acc_stderr,none": 0.0015229333220403854,
						"alias": "lambada_multilingual",
						"perplexity,none": 12450564.061607182,
						"perplexity_stderr,none": 1249467.7816388514
					},
					"lambada_openai": {
						"acc,none": 0.23403842421890161,
						"acc_stderr,none": 0.005898738551589732,
						"alias": " - lambada_openai",
						"perplexity,none": 1917.7940256944985,
						"perplexity_stderr,none": 164.43054773390955
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.32117213273821077,
						"acc_stderr,none": 0.006505202676138961,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 309.3254212120804,
						"perplexity_stderr,none": 23.43687253753982
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.0335726761110033,
						"acc_stderr,none": 0.0025095141759726726,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 1035652.5718469626,
						"perplexity_stderr,none": 96379.3032728331
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.2342324859305259,
						"acc_stderr,none": 0.005900436024282866,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 1917.0163302100443,
						"perplexity_stderr,none": 164.39879805279188
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.023869590529788473,
						"acc_stderr,none": 0.002126613069613407,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 53293984.06375578,
						"perplexity_stderr,none": 6216459.118476964
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.03473704638074908,
						"acc_stderr,none": 0.0025511226364161796,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 2817136.237369704,
						"perplexity_stderr,none": 281430.8266170869
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.031049873859887445,
						"acc_stderr,none": 0.0024165328581618162,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 5104130.418733241,
						"perplexity_stderr,none": 544414.5064614916
					},
					"lambada_standard": {
						"acc,none": 0.323306811566078,
						"acc_stderr,none": 0.006516515049707148,
						"alias": " - lambada_standard",
						"perplexity,none": 332.00596835217794,
						"perplexity_stderr,none": 25.63111276838661
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.33708519309140306,
						"acc_stderr,none": 0.006585833859592014,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 363.4431774733422,
						"perplexity_stderr,none": 29.59147496431739
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.2626728110599078,
						"acc_norm_stderr,none": 0.017261598347857544,
						"acc_stderr,none": 0.016555252497925898,
						"alias": "logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.294529262086514,
						"acc_norm_stderr,none": 0.011500471190116978,
						"acc_stderr,none": 0.011060275310259937,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.21943048576214405,
						"acc_norm,none": 0.21742043551088777,
						"acc_norm_stderr,none": 0.007551183476415311,
						"acc_stderr,none": 0.007576259919649273,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6347172209277695,
						"acc_stderr,none": 0.004955594258681705,
						"alias": "mc_taco",
						"f1,none": 0.10112066718790721,
						"f1_stderr,none": 0.006687632895914634
					},
					"medmcqa": {
						"acc,none": 0.31173798709060485,
						"acc_norm,none": 0.31173798709060485,
						"acc_norm_stderr,none": 0.0071627492740507104,
						"acc_stderr,none": 0.0071627492740507104,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27729772191673213,
						"acc_norm,none": 0.27729772191673213,
						"acc_norm_stderr,none": 0.012551895273228598,
						"acc_stderr,none": 0.012551895273228598,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.23543654750035609,
						"acc_stderr,none": 0.0035728818178558295,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322674,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.18518518518518517,
						"acc_stderr,none": 0.03355677216313143,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.18421052631578946,
						"acc_stderr,none": 0.0315469804508223,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22641509433962265,
						"acc_stderr,none": 0.025757559893106744,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2023121387283237,
						"acc_stderr,none": 0.030631145539198823,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.028504856470514203,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.039994238792813365,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2206896551724138,
						"acc_stderr,none": 0.034559302019248124,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21164021164021163,
						"acc_stderr,none": 0.021037331505262893,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.041049472699033945,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.17419354838709677,
						"acc_stderr,none": 0.021576248184514552,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.15270935960591134,
						"acc_stderr,none": 0.025308904539380627,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.044084400227680794,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.22424242424242424,
						"acc_stderr,none": 0.03256866661681102,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.18686868686868688,
						"acc_stderr,none": 0.027772533334218984,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.20725388601036268,
						"acc_stderr,none": 0.029252823291803644,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2153846153846154,
						"acc_stderr,none": 0.020843034557462878,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2111111111111111,
						"acc_stderr,none": 0.02488211685765509,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.02665353159671549,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2119205298013245,
						"acc_stderr,none": 0.03336767086567977,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.1981651376146789,
						"acc_stderr,none": 0.017090573804217885,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16203703703703703,
						"acc_stderr,none": 0.025130453652268455,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.270042194092827,
						"acc_stderr,none": 0.028900721906293426,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3273542600896861,
						"acc_stderr,none": 0.03149384670994131,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847836,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2514346439957492,
						"acc_stderr,none": 0.0063197577518516014,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2396694214876033,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.043733130409147614,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2392638036809816,
						"acc_stderr,none": 0.0335195387952127,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.17475728155339806,
						"acc_stderr,none": 0.03760178006026619,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3162393162393162,
						"acc_stderr,none": 0.030463656747340247,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.24648786717752236,
						"acc_stderr,none": 0.015411308769686936,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2658959537572254,
						"acc_stderr,none": 0.023786203255508287,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.22875816993464052,
						"acc_stderr,none": 0.024051029739912255,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.24654007080785323,
						"acc_stderr,none": 0.0077079755124050495,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.18006430868167203,
						"acc_stderr,none": 0.021823422857744943,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.022779719088733396,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.025257861359432414,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2692307692307692,
						"acc_stderr,none": 0.01132873440314033,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.18382352941176472,
						"acc_stderr,none": 0.02352924218519311,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2565359477124183,
						"acc_stderr,none": 0.017667841612379002,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.22727272727272727,
						"acc_stderr,none": 0.04013964554072773,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.17959183673469387,
						"acc_stderr,none": 0.024573293589585637,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.22326941826454338,
						"acc_stderr,none": 0.007503203481714887,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.24875621890547264,
						"acc_stderr,none": 0.030567675938916707,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.21249603552172533,
						"acc_stderr,none": 0.00727642827845504,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.28313253012048195,
						"acc_stderr,none": 0.03507295431370519,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2982456140350877,
						"acc_stderr,none": 0.03508771929824562,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4226184411614875,
						"acc_stderr,none": 0.004986348741037276,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.43185516680227826,
						"acc_stderr,none": 0.004995739021640487,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7083333333333334,
						"acc_stderr,none": 0.022530199346874002,
						"alias": "mrpc",
						"f1,none": 0.7724665391969407,
						"f1_stderr,none": 0.020347041579257
					},
					"multimedqa": {
						"acc,none": 0.2942512420156139,
						"acc_stderr,none": 0.005412562220361505,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5138201320132013,
						"acc_stderr,none": 0.007179059189771656,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6044958638485614,
						"mrr_stderr,none": 0.010347630488766175,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407455,
						"r@2,none": 0.42776523702031605,
						"r@2_stderr,none": 0.016630994786546338
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.5766553822274133,
						"mrr_stderr,none": 0.01007944850497972,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750964,
						"r@2,none": 0.4932279909706546,
						"r@2_stderr,none": 0.01680577449500813
					},
					"openbookqa": {
						"acc,none": 0.14,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.019635965529725512,
						"acc_stderr,none": 0.015533272840269634,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.011117724672834362,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4975,
						"acc_stderr,none": 0.011182996230990781,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4625,
						"acc_stderr,none": 0.011151639095992297,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.011133619300989873,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5585,
						"acc_stderr,none": 0.011106329288974695,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456293,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.011120131683767739,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5186428571428572,
						"acc_stderr,none": 0.004208100724663781,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.5930359085963003,
						"acc_norm,none": 0.5963003264417845,
						"acc_norm_stderr,none": 0.011447407541749091,
						"acc_stderr,none": 0.011462093919190166,
						"alias": "piqa"
					},
					"prost": {
						"acc,none": 0.3264837745516652,
						"acc_norm,none": 0.34206874466268145,
						"acc_norm_stderr,none": 0.003465932554948355,
						"acc_stderr,none": 0.003425926047424851,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.0213237286328075,
						"alias": "pubmedqa"
					},
					"qa4mre": {
						"acc,none": 0.2801418439716312,
						"acc_norm,none": 0.3280141843971631,
						"acc_norm_stderr,none": 0.01980781501162305,
						"acc_stderr,none": 0.01889511788229077,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.24166666666666667,
						"acc_norm,none": 0.3416666666666667,
						"acc_norm_stderr,none": 0.04347611684317006,
						"acc_stderr,none": 0.039243250116912654,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.036342189215581536,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.31690140845070425,
						"acc_norm,none": 0.3380281690140845,
						"acc_norm_stderr,none": 0.028119201465363817,
						"acc_stderr,none": 0.02765734975848418,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.7752150832875709,
						"acc_stderr,none": 0.0056483141351066956,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.689092258224091,
						"acc_stderr,none": 0.002302013964886716,
						"alias": "qqp",
						"f1,none": 0.45037166593790995,
						"f1_stderr,none": 0.0041060298914963
					},
					"race": {
						"acc,none": 0.3167464114832536,
						"acc_stderr,none": 0.014397814139910621,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5812274368231047,
						"acc_stderr,none": 0.029696661081234824,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.882,
						"acc_norm,none": 0.879,
						"acc_norm_stderr,none": 0.01031821038094609,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5740072202166066,
						"acc_stderr,none": 0.02976495674177765,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8463302752293578,
						"acc_stderr,none": 0.012219544510178489,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.36853943816854945,
						"acc_norm,none": 0.44616615015495353,
						"acc_norm_stderr,none": 0.0035145423355798155,
						"acc_stderr,none": 0.0034107180746441056,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5654720308808359,
						"acc_stderr,none": 0.0028197305099225186,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5181290064102564,
						"acc_stderr,none": 0.005000964938709148,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6828823350562481,
						"acc_stderr,none": 0.004685032426662151,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.49823529411764705,
						"acc_stderr,none": 0.004950949579298688,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.003937007874015748,
						"exact_match_stderr,none": 0.00138954169304091
					},
					"wic": {
						"acc,none": 0.542319749216301,
						"acc_stderr,none": 0.019739633283732762,
						"alias": "wic"
					},
					"winogrande": {
						"acc,none": 0.5043409629044988,
						"acc_stderr,none": 0.014051956064076892,
						"alias": "winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6057692307692307,
						"acc_stderr,none": 0.04815154775990712,
						"alias": "wsc"
					},
					"wsc273": {
						"acc,none": 0.5164835164835165,
						"acc_stderr,none": 0.0303004740355766,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5109090909090909,
						"acc_stderr,none": 0.006742573432585962,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207878,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269955,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.022382357781962132,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.34471218206157966,
						"acc_stderr,none": 0.002455367656551436,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358225,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.009512333319470365,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.3477911646586345,
						"acc_stderr,none": 0.009546411769843137,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358225,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894256,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3401606425702811,
						"acc_stderr,none": 0.009496174608136405,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138157,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516883,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.00943457405610196,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757722,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.4840863967270321,
						"acc_stderr,none": 0.0038731184107945655,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.49636002647253474,
						"acc_stderr,none": 0.01286678434828923,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.5479814692256784,
						"acc_stderr,none": 0.012807742345189275,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.47782925215089345,
						"acc_stderr,none": 0.012854469625936086,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4983454665784249,
						"acc_stderr,none": 0.012867054869163341,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.45334215751158174,
						"acc_stderr,none": 0.012810980537828172,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.46062210456651226,
						"acc_stderr,none": 0.012827159238891913,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4824619457313038,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4784910655195235,
						"acc_stderr,none": 0.012855214257296597,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4692256783587028,
						"acc_stderr,none": 0.012842730340585787,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.4877564526803441,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.012847698270388223,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.5088783996403686,
						"acc_stderr,none": 0.007498296937388405,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.5101075268817205,
						"acc_stderr,none": 0.010369628254978284,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.055211755360913765,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5182481751824818,
						"acc_stderr,none": 0.016143504549715058,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.49809885931558934,
						"acc_stderr,none": 0.030889879865535996,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5238095238095238,
						"acc_stderr,none": 0.028184622595998434,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.48412698412698413,
						"acc_stderr,none": 0.022282661258869584,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "google/flan-t5-base"
	},
	"google/flan-t5-large": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.47463359639233377,
						"acc_norm,none": 0.4819616685456595,
						"acc_norm_stderr,none": 0.008128759935576076,
						"acc_stderr,none": 0.008106432501793692,
						"alias": "ai2_arc"
					},
					"cmmlu": {
						"acc,none": 0.25194266965981693,
						"acc_norm,none": 0.25194266965981693,
						"acc_norm_stderr,none": 0.004043791565042548,
						"acc_stderr,none": 0.004043791565042548,
						"alias": "cmmlu"
					},
					"lambada_multilingual": {
						"acc,none": 0.05639433339802057,
						"acc_stderr,none": 0.0013909741418896373,
						"alias": "lambada_multilingual",
						"perplexity,none": 779460.5603023221,
						"perplexity_stderr,none": 50039.276154081854
					},
					"mmlu": {
						"acc,none": 0.3651189289275032,
						"acc_stderr,none": 0.003994986406348924,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3436769394261424,
						"acc_stderr,none": 0.006795941099625922,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4119729642742195,
						"acc_stderr,none": 0.00867388247688649,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4026649333766656,
						"acc_stderr,none": 0.008744682637669685,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3143038376149699,
						"acc_stderr,none": 0.008235454104150565,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2757984386089425,
						"acc_stderr,none": 0.005271990141600472,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.004206844737909337,
						"alias": "pawsx"
					},
					"qa4mre": {
						"acc,none": 0.33865248226950356,
						"acc_norm,none": 0.38475177304964536,
						"acc_norm_stderr,none": 0.020500895567542685,
						"acc_stderr,none": 0.019935472266429496,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7407740175035773,
						"acc_stderr,none": 0.0023543751463321262,
						"alias": "sycophancy"
					},
					"xcopa": {
						"acc,none": 0.5105454545454545,
						"acc_stderr,none": 0.006743054857948581,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.34195448460508704,
						"acc_stderr,none": 0.0024499095694181018,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.500511401239396,
						"acc_stderr,none": 0.0038494801673705768,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.5322544391998202,
						"acc_stderr,none": 0.007473335778920769,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.47463359639233377,
						"acc_norm,none": 0.4819616685456595,
						"acc_norm_stderr,none": 0.008128759935576076,
						"acc_stderr,none": 0.008106432501793692,
						"alias": "ai2_arc"
					},
					"arc_challenge": {
						"acc,none": 0.29266211604095566,
						"acc_norm,none": 0.3054607508532423,
						"acc_norm_stderr,none": 0.013460080478002505,
						"acc_stderr,none": 0.013295916103619422,
						"alias": " - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.5643939393939394,
						"acc_norm,none": 0.569023569023569,
						"acc_norm_stderr,none": 0.010161552863493751,
						"acc_stderr,none": 0.010174341733665222,
						"alias": " - arc_easy"
					},
					"cmmlu": {
						"acc,none": 0.25194266965981693,
						"acc_norm,none": 0.25194266965981693,
						"acc_norm_stderr,none": 0.004043791565042548,
						"acc_stderr,none": 0.004043791565042548,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.036628698766429046,
						"acc_stderr,none": 0.036628698766429046,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364997,
						"acc_stderr,none": 0.03418746588364997,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24401913875598086,
						"acc_norm,none": 0.24401913875598086,
						"acc_norm_stderr,none": 0.029780753228706106,
						"acc_stderr,none": 0.029780753228706106,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.037626074966240076,
						"acc_stderr,none": 0.037626074966240076,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743277,
						"acc_stderr,none": 0.024155705949743277,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.03251588837184109,
						"acc_stderr,none": 0.03251588837184109,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371222,
						"acc_stderr,none": 0.04198857662371222,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382031,
						"acc_stderr,none": 0.04522350077382031,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.039578354719809805,
						"acc_stderr,none": 0.039578354719809805,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.03990657150993187,
						"acc_stderr,none": 0.03990657150993187,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.025825054502221032,
						"acc_stderr,none": 0.025825054502221032,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606757,
						"acc_stderr,none": 0.03558926157606757,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.034520558111649044,
						"acc_stderr,none": 0.034520558111649044,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790498,
						"acc_stderr,none": 0.028394293050790498,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.03074630074212451,
						"acc_stderr,none": 0.03074630074212451,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998167,
						"acc_stderr,none": 0.028942004040998167,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.26956521739130435,
						"acc_norm,none": 0.26956521739130435,
						"acc_norm_stderr,none": 0.029322764228949524,
						"acc_stderr,none": 0.029322764228949524,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.03572502141815571,
						"acc_stderr,none": 0.03572502141815571,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.035513440416974316,
						"acc_stderr,none": 0.035513440416974316,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.040255666847142615,
						"acc_stderr,none": 0.040255666847142615,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900237,
						"acc_stderr,none": 0.03675137438900237,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.021470991853398305,
						"acc_stderr,none": 0.021470991853398305,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435978,
						"acc_stderr,none": 0.029761395837435978,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.23770491803278687,
						"acc_norm,none": 0.23770491803278687,
						"acc_norm_stderr,none": 0.038697949843811565,
						"acc_stderr,none": 0.038697949843811565,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.0298564231646719,
						"acc_stderr,none": 0.0298564231646719,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.031524802348711634,
						"acc_stderr,none": 0.031524802348711634,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.04083221538649575,
						"acc_stderr,none": 0.04083221538649575,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135302,
						"acc_stderr,none": 0.03565998174135302,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24571428571428572,
						"acc_norm,none": 0.24571428571428572,
						"acc_norm_stderr,none": 0.03263687142627841,
						"acc_stderr,none": 0.03263687142627841,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171128,
						"acc_stderr,none": 0.028324514684171128,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543969,
						"acc_stderr,none": 0.03279424038543969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890805,
						"acc_stderr,none": 0.028952167450890805,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.033603007963315265,
						"acc_stderr,none": 0.033603007963315265,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.033870869961530825,
						"acc_stderr,none": 0.033870869961530825,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_world_religions"
					},
					"lambada_multilingual": {
						"acc,none": 0.05639433339802057,
						"acc_stderr,none": 0.0013909741418896373,
						"alias": "lambada_multilingual",
						"perplexity,none": 779460.5603023221,
						"perplexity_stderr,none": 50039.276154081854
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.028721133320395886,
						"acc_stderr,none": 0.0023269380914502317,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 323133.8211584657,
						"perplexity_stderr,none": 26520.898136757452
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.17232679992237532,
						"acc_stderr,none": 0.005261601237611626,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 7236.258555446602,
						"perplexity_stderr,none": 628.685677687046
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.02076460314379973,
						"acc_stderr,none": 0.0019866320715202592,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 2832088.5554328295,
						"perplexity_stderr,none": 244683.35362500805
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.028333009897147293,
						"acc_stderr,none": 0.002311623782601571,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 275741.8472256802,
						"perplexity_stderr,none": 21505.579963330852
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.03182612070638463,
						"acc_stderr,none": 0.002445572861351714,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 459102.3191391894,
						"perplexity_stderr,none": 39522.67901104466
					},
					"logiqa2": {
						"acc,none": 0.31806615776081426,
						"acc_norm,none": 0.32697201017811706,
						"acc_norm_stderr,none": 0.011835422313897942,
						"acc_stderr,none": 0.011750105354514661,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23919597989949748,
						"acc_norm,none": 0.23785594639865998,
						"acc_norm_stderr,none": 0.007794282274854809,
						"acc_stderr,none": 0.007809332748857678,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6678669773353103,
						"acc_stderr,none": 0.004847212007249936,
						"alias": "mc_taco",
						"f1,none": 0.06777645659928656,
						"f1_stderr,none": 0.006016346742040349
					},
					"medmcqa": {
						"acc,none": 0.2295003585943103,
						"acc_norm,none": 0.2295003585943103,
						"acc_norm_stderr,none": 0.006502582792591477,
						"acc_stderr,none": 0.006502582792591477,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.30557737627651216,
						"acc_norm,none": 0.30557737627651216,
						"acc_norm_stderr,none": 0.012916027881886082,
						"acc_stderr,none": 0.012916027881886082,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.3651189289275032,
						"acc_stderr,none": 0.003994986406348924,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.042295258468165044,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.35555555555555557,
						"acc_stderr,none": 0.04135176749720386,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.03782728980865469,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.050251890762960605,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.42641509433962266,
						"acc_stderr,none": 0.03043779434298305,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.039420826399272135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.37572254335260113,
						"acc_stderr,none": 0.036928207672648664,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.04533838195929774,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33617021276595743,
						"acc_stderr,none": 0.030881618520676942,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.20175438596491227,
						"acc_stderr,none": 0.03775205013583638,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3103448275862069,
						"acc_stderr,none": 0.03855289616378948,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24867724867724866,
						"acc_stderr,none": 0.022261817692400175,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.30158730158730157,
						"acc_stderr,none": 0.04104947269903394,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.38064516129032255,
						"acc_stderr,none": 0.02762171783290704,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114475,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5757575757575758,
						"acc_stderr,none": 0.03859268142070265,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.41919191919191917,
						"acc_stderr,none": 0.035155207286704175,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45595854922279794,
						"acc_stderr,none": 0.035944137112724366,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.35384615384615387,
						"acc_stderr,none": 0.024243783994062153,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.026466117538959905,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.03156663099215416,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.304635761589404,
						"acc_stderr,none": 0.037579499229433426,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.48807339449541287,
						"acc_stderr,none": 0.021431223617362233,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.032149521478027486,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.46078431372549017,
						"acc_stderr,none": 0.03498501649369527,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5189873417721519,
						"acc_stderr,none": 0.03252375148090447,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.34977578475336324,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.37404580152671757,
						"acc_stderr,none": 0.04243869242230524,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3436769394261424,
						"acc_stderr,none": 0.006795941099625922,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.48760330578512395,
						"acc_stderr,none": 0.04562951548180765,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.37962962962962965,
						"acc_stderr,none": 0.04691521224077742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.4785276073619632,
						"acc_stderr,none": 0.03924746876751129,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.042466243366976256,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5728155339805825,
						"acc_stderr,none": 0.04897957737781168,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6025641025641025,
						"acc_stderr,none": 0.03205953453789293,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4840357598978289,
						"acc_stderr,none": 0.01787084750608173,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.41040462427745666,
						"acc_stderr,none": 0.026483392042098187,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22905027932960895,
						"acc_stderr,none": 0.014054314935614572,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.38562091503267976,
						"acc_stderr,none": 0.027870745278290303,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4119729642742195,
						"acc_stderr,none": 0.00867388247688649,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.36977491961414793,
						"acc_stderr,none": 0.027417996705630995,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3549382716049383,
						"acc_stderr,none": 0.026624152478845853,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2695035460992908,
						"acc_stderr,none": 0.026469036818590634,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.29726205997392435,
						"acc_stderr,none": 0.011673346173086066,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.02815637344037142,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.315359477124183,
						"acc_stderr,none": 0.018798086284886887,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4636363636363636,
						"acc_stderr,none": 0.04776449162396197,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.42448979591836733,
						"acc_stderr,none": 0.031642094879429414,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4026649333766656,
						"acc_stderr,none": 0.008744682637669685,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5174129353233831,
						"acc_stderr,none": 0.03533389234739245,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3143038376149699,
						"acc_stderr,none": 0.008235454104150565,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.0371172519074075,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.03599335771456027,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.6362710137544575,
						"acc_stderr,none": 0.004856093036296407,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.6388323840520749,
						"acc_stderr,none": 0.004844500609964595,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.803921568627451,
						"acc_stderr,none": 0.019679975237883437,
						"alias": "mrpc",
						"f1,none": 0.8713826366559485,
						"f1_stderr,none": 0.014266852804266204
					},
					"multimedqa": {
						"acc,none": 0.2757984386089425,
						"acc_stderr,none": 0.005271990141600472,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5678630363036303,
						"acc_stderr,none": 0.007115345587627509,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6652558333246756,
						"mrr_stderr,none": 0.010503464218100913,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407474,
						"r@2,none": 0.40970654627539504,
						"r@2_stderr,none": 0.01653098758467983
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6261286700215889,
						"mrr_stderr,none": 0.010483457838398796,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750955,
						"r@2,none": 0.4672686230248307,
						"r@2_stderr,none": 0.016771264669080584
					},
					"openbookqa": {
						"acc,none": 0.188,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.020704041021724805,
						"acc_stderr,none": 0.01749067888034625,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456293,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4515,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4505,
						"acc_stderr,none": 0.011128198119942876,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719634,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5525,
						"acc_stderr,none": 0.011121318125943089,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.011120131683767739,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.004206844737909337,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7105549510337323,
						"acc_norm,none": 0.7263329706202394,
						"acc_norm_stderr,none": 0.010402184206229213,
						"acc_stderr,none": 0.010581014740675607,
						"alias": "piqa"
					},
					"prost": {
						"acc,none": 0.3688620836891546,
						"acc_norm,none": 0.37889624252775406,
						"acc_norm_stderr,none": 0.0035441768034476713,
						"acc_stderr,none": 0.0035250663410975325,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.02206494331392886,
						"alias": "pubmedqa"
					},
					"qa4mre": {
						"acc,none": 0.33865248226950356,
						"acc_norm,none": 0.38475177304964536,
						"acc_norm_stderr,none": 0.020500895567542685,
						"acc_stderr,none": 0.019935472266429496,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.0451938453788867,
						"acc_stderr,none": 0.042642631535546364,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03634218921558155,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.36971830985915494,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064897,
						"acc_stderr,none": 0.028695223203150086,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.8251876258466044,
						"acc_stderr,none": 0.005139094349718385,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.8183774424931981,
						"acc_stderr,none": 0.00191741101892525,
						"alias": "qqp",
						"f1,none": 0.778858605631682,
						"f1_stderr,none": 0.0025182265375427756
					},
					"race": {
						"acc,none": 0.37894736842105264,
						"acc_stderr,none": 0.015014241655133454,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.7942238267148014,
						"acc_stderr,none": 0.024334053478024743,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.931,
						"acc_norm,none": 0.928,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.00801893405031517,
						"alias": "sciq"
					},
					"sglue_rte": {
						"acc,none": 0.7978339350180506,
						"acc_stderr,none": 0.02417440759219474,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.9380733944954128,
						"acc_stderr,none": 0.008166725706554192,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.4370188943317005,
						"acc_norm,none": 0.5521343596920923,
						"acc_norm_stderr,none": 0.0035158228303530607,
						"acc_stderr,none": 0.0035069351815865653,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7407740175035773,
						"acc_stderr,none": 0.0023543751463321262,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6952123397435898,
						"acc_stderr,none": 0.004607092059695132,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9579406101145231,
						"acc_stderr,none": 0.0020208320499054233,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5752941176470588,
						"acc_stderr,none": 0.0048945222703045705,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.22096456692913385,
						"exact_match_stderr,none": 0.009206296602858774
					},
					"wic": {
						"acc,none": 0.6018808777429467,
						"acc_stderr,none": 0.019395102343077997,
						"alias": "wic"
					},
					"winogrande": {
						"acc,none": 0.5169692186266772,
						"acc_stderr,none": 0.014044390401612978,
						"alias": "winogrande"
					},
					"wnli": {
						"acc,none": 0.6056338028169014,
						"acc_stderr,none": 0.058412510854444266,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.7115384615384616,
						"acc_stderr,none": 0.04464003593905588,
						"alias": "wsc"
					},
					"wsc273": {
						"acc,none": 0.5604395604395604,
						"acc_stderr,none": 0.030094646016767413,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5105454545454545,
						"acc_stderr,none": 0.006743054857948581,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.022378596989230785,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.022374298166353175,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385256,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289666,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.022382357781962126,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.34195448460508704,
						"acc_stderr,none": 0.0024499095694181018,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3305220883534137,
						"acc_stderr,none": 0.009428789109289832,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.45502008032128516,
						"acc_stderr,none": 0.009981437307797261,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.348995983935743,
						"acc_stderr,none": 0.009554095988300676,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617605,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3248995983935743,
						"acc_stderr,none": 0.00938742158168576,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.009443193365903347,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617612,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.500511401239396,
						"acc_stderr,none": 0.0038494801673705768,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47650562541363334,
						"acc_stderr,none": 0.012852912530051752,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6915949702183984,
						"acc_stderr,none": 0.011884972073313783,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5095962938451357,
						"acc_stderr,none": 0.012864755260408957,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5095962938451357,
						"acc_stderr,none": 0.012864755260408957,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.46591661151555264,
						"acc_stderr,none": 0.012837195610619431,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4804765056254136,
						"acc_stderr,none": 0.012857312531836857,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977606,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4811383189940437,
						"acc_stderr,none": 0.012857966762464998,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.47121111846459296,
						"acc_stderr,none": 0.012845779070719505,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.4639311714096625,
						"acc_stderr,none": 0.012833602406620011,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.012847698270388222,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.5322544391998202,
						"acc_stderr,none": 0.007473335778920769,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.556989247311828,
						"acc_stderr,none": 0.010304157243242698,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.05521175536091375,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5130344108446299,
						"acc_stderr,none": 0.016148776724612655,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.4790874524714829,
						"acc_stderr,none": 0.030863072709687606,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.028153858945648896,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.022287578075447474,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "google/flan-t5-large"
	},
	"google/gemma-2b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"lambada_multilingual": {
						"acc,none": 0.15808267028915196,
						"acc_stderr,none": 0.0022555304151213183,
						"alias": "lambada_multilingual",
						"perplexity,none": 8686.588767126095,
						"perplexity_stderr,none": 445.74297749194534
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.41843971631205673,
						"acc_norm_stderr,none": 0.02065172799601949,
						"acc_stderr,none": 0.02042281403781797,
						"alias": "qa4mre"
					},
					"xnli": {
						"acc,none": 0.33801874163319945,
						"acc_stderr,none": 0.002447506903831831,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5094759641417483,
						"acc_stderr,none": 0.0038756496638598916,
						"alias": "xstorycloze"
					}
				},
				"results": {
					"lambada_multilingual": {
						"acc,none": 0.15808267028915196,
						"acc_stderr,none": 0.0022555304151213183,
						"alias": "lambada_multilingual",
						"perplexity,none": 8686.588767126095,
						"perplexity_stderr,none": 445.74297749194534
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.12827479138366,
						"acc_stderr,none": 0.00465878355619086,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 8911.688408919335,
						"perplexity_stderr,none": 857.8170504104418
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.23617310304676886,
						"acc_stderr,none": 0.0059173160274042045,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 380.5824710396832,
						"perplexity_stderr,none": 26.623634426217478
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.10421113914224724,
						"acc_stderr,none": 0.004256689511125028,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 18310.31179637831,
						"perplexity_stderr,none": 1690.9970684036114
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.1731030467688725,
						"acc_stderr,none": 0.005270964905423159,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 4211.875977856614,
						"perplexity_stderr,none": 362.8721170953673
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.14865127110421114,
						"acc_stderr,none": 0.004956214938690635,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 11618.48518143653,
						"perplexity_stderr,none": 1113.312121996706
					},
					"qa4mre": {
						"acc,none": 0.375886524822695,
						"acc_norm,none": 0.41843971631205673,
						"acc_norm_stderr,none": 0.02065172799601949,
						"acc_stderr,none": 0.02042281403781797,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4,
						"acc_norm,none": 0.525,
						"acc_norm_stderr,none": 0.04577759534198058,
						"acc_stderr,none": 0.04490887131390718,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3873239436619718,
						"acc_norm,none": 0.3626760563380282,
						"acc_norm_stderr,none": 0.028578954826942813,
						"acc_stderr,none": 0.02895738957595097,
						"alias": " - qa4mre_2013"
					},
					"rte": {
						"acc,none": 0.516245487364621,
						"acc_stderr,none": 0.030080573208738064,
						"alias": "rte"
					},
					"winogrande": {
						"acc,none": 0.5035516969218626,
						"acc_stderr,none": 0.01405213114691586,
						"alias": "winogrande"
					},
					"xnli": {
						"acc,none": 0.33801874163319945,
						"acc_stderr,none": 0.002447506903831831,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.00940533815661493,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.009493454925438252,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.3457831325301205,
						"acc_stderr,none": 0.009533455033752764,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516881,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.3618473895582329,
						"acc_stderr,none": 0.00963191294489075,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3421686746987952,
						"acc_stderr,none": 0.009509659143015629,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.009405338156614929,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3261044176706827,
						"acc_stderr,none": 0.009396415172722673,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361541,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.00940533815661493,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516881,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.344578313253012,
						"acc_stderr,none": 0.009525590900110657,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.009405338156614927,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.009593947957927137,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3273092369477912,
						"acc_stderr,none": 0.009405338156614929,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5094759641417483,
						"acc_stderr,none": 0.0038756496638598916,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.48643282594308407,
						"acc_stderr,none": 0.012862387586650075,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084812,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5168762409000662,
						"acc_stderr,none": 0.012859793919977606,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.500330906684315,
						"acc_stderr,none": 0.012867122498493424,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5208471211118465,
						"acc_stderr,none": 0.012855936282881267,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4943745863666446,
						"acc_stderr,none": 0.012866310923072515,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4811383189940437,
						"acc_stderr,none": 0.012857966762464996,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5049636002647253,
						"acc_stderr,none": 0.012866491277589948,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163343,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5347452018530774,
						"acc_stderr,none": 0.01283602058540743,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5082726671078756,
						"acc_stderr,none": 0.012865364020375396,
						"alias": " - xstorycloze_zh"
					}
				}
			}
		},
		"name": "google/gemma-2b"
	},
	"google/gemma-2b-it": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"mmlu": {
						"acc,none": 0.28578550064093433,
						"acc_stderr,none": 0.0037954907591598868,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2973432518597237,
						"acc_stderr,none": 0.006629538721842289,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2922433215320245,
						"acc_stderr,none": 0.008143178426573382,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29054273643158923,
						"acc_stderr,none": 0.008143333254705903,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2575325087218522,
						"acc_stderr,none": 0.007765932826821019,
						"alias": " - stem"
					},
					"xwinograd": {
						"acc,none": 0.5342773657001574,
						"acc_stderr,none": 0.007480615038502453,
						"alias": "xwinograd"
					}
				},
				"results": {
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"logiqa2": {
						"acc,none": 0.24491094147582698,
						"acc_norm,none": 0.26145038167938933,
						"acc_norm_stderr,none": 0.0110865491471325,
						"acc_stderr,none": 0.010849634050074235,
						"alias": "logiqa2"
					},
					"mmlu": {
						"acc,none": 0.28578550064093433,
						"acc_stderr,none": 0.0037954907591598868,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.036333844140734664,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03459777606810537,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768077,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.28679245283018867,
						"acc_stderr,none": 0.027834912527544074,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2708333333333333,
						"acc_stderr,none": 0.03716177437566016,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.03414014007044036,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.040233822736177476,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2851063829787234,
						"acc_stderr,none": 0.029513196625539355,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.19298245614035087,
						"acc_stderr,none": 0.037124548537213684,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3931034482758621,
						"acc_stderr,none": 0.0407032901370707,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2275132275132275,
						"acc_stderr,none": 0.021591269407823774,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.03852273364924316,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2967741935483871,
						"acc_stderr,none": 0.025988500792411894,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.21182266009852216,
						"acc_stderr,none": 0.028748983689941072,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.34545454545454546,
						"acc_stderr,none": 0.037131580674819135,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.29292929292929293,
						"acc_stderr,none": 0.032424979581788145,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.25906735751295334,
						"acc_stderr,none": 0.03161877917935411,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3153846153846154,
						"acc_stderr,none": 0.023559646983189932,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.025787874220959316,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.25630252100840334,
						"acc_stderr,none": 0.02835962087053395,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.033742355504256936,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.25688073394495414,
						"acc_stderr,none": 0.01873249292834245,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18981481481481483,
						"acc_stderr,none": 0.026744714834691954,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3382352941176471,
						"acc_stderr,none": 0.0332057461294543,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.4177215189873418,
						"acc_stderr,none": 0.03210353032241268,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2825112107623318,
						"acc_stderr,none": 0.030216831011508766,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.37404580152671757,
						"acc_stderr,none": 0.04243869242230524,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2973432518597237,
						"acc_stderr,none": 0.006629538721842289,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4214876033057851,
						"acc_stderr,none": 0.045077322787750944,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3611111111111111,
						"acc_stderr,none": 0.04643454608906274,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.27607361963190186,
						"acc_stderr,none": 0.0351238528370505,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3125,
						"acc_stderr,none": 0.043994650575715215,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.27184466019417475,
						"acc_stderr,none": 0.044052680241409216,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3717948717948718,
						"acc_stderr,none": 0.031660988918880785,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2950191570881226,
						"acc_stderr,none": 0.016308363772932724,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3236994219653179,
						"acc_stderr,none": 0.025190181327608408,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24022346368715083,
						"acc_stderr,none": 0.014288343803925307,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.32679738562091504,
						"acc_stderr,none": 0.02685729466328141,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2922433215320245,
						"acc_stderr,none": 0.008143178426573382,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2572347266881029,
						"acc_stderr,none": 0.024826171289250888,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2716049382716049,
						"acc_stderr,none": 0.024748624490537375,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.02657786094330785,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2894393741851369,
						"acc_stderr,none": 0.011582659702210254,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.025187786660227276,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2679738562091503,
						"acc_stderr,none": 0.017917974069594726,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2818181818181818,
						"acc_stderr,none": 0.043091187099464606,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3469387755102041,
						"acc_stderr,none": 0.030472526026726496,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29054273643158923,
						"acc_stderr,none": 0.008143333254705903,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3034825870646766,
						"acc_stderr,none": 0.03251006816458618,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2575325087218522,
						"acc_stderr,none": 0.007765932826821019,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.050251890762960605,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.03753267402120575,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.40350877192982454,
						"acc_stderr,none": 0.03762738699917056,
						"alias": "  - world_religions"
					},
					"webqs": {
						"alias": " - webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"xwinograd": {
						"acc,none": 0.5342773657001574,
						"acc_stderr,none": 0.007480615038502453,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.5410752688172042,
						"acc_stderr,none": 0.010336690459753396,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.05521175536091375,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5109489051094891,
						"acc_stderr,none": 0.01615039318009044,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.5285171102661597,
						"acc_stderr,none": 0.030839820992717426,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.0280419147291705,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5416666666666666,
						"acc_stderr,none": 0.022216353875034702,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "google/gemma-2b-it"
	},
	"google/gemma-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.25366403607666294,
						"acc_norm,none": 0.2615558060879369,
						"acc_norm_stderr,none": 0.007365382078023482,
						"acc_stderr,none": 0.007273449212895264,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.33625,
						"acc_stderr,none": 0.008354126064223918,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.5762089552238806,
						"acc_stderr,none": 0.0018018618909635073,
						"alias": "blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789004,
						"acc_norm,none": 0.2555720653789004,
						"acc_norm_stderr,none": 0.011832134124595545,
						"acc_stderr,none": 0.011832134124595545,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.23743740286651702,
						"acc_norm,none": 0.23743740286651702,
						"acc_norm_stderr,none": 0.003955069878474568,
						"acc_stderr,none": 0.003955069878474568,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 13.177251043530113,
						"likelihood_diff_stderr,none": 0.17239541498920263,
						"pct_stereotype,none": 0.44991055456171736,
						"pct_stereotype_stderr,none": 0.006025660934263797
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"kobest": {
						"acc,none": 0.4709493532120149,
						"acc_stderr,none": 0.007333438274771031,
						"alias": "kobest",
						"f1,none": 0.37450243633981517,
						"f1_stderr,none": "N/A"
					},
					"lambada_multilingual": {
						"acc,none": 0.004346982340384243,
						"acc_stderr,none": 0.0004098404757155364,
						"alias": "lambada_multilingual",
						"perplexity,none": 95048329591.87683,
						"perplexity_stderr,none": 11493246193.665659
					},
					"mmlu": {
						"acc,none": 0.2469733656174334,
						"acc_stderr,none": 0.003636673741729959,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2452709883103082,
						"acc_stderr,none": 0.0062727475783044976,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.23688445445767622,
						"acc_stderr,none": 0.007619024706686291,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.25804354891127723,
						"acc_stderr,none": 0.007871590459686583,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24865207738661593,
						"acc_stderr,none": 0.007701731877240684,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.25691980127750175,
						"acc_stderr,none": 0.005109843318904429,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.5217142857142857,
						"acc_stderr,none": 0.0042185680657789324,
						"alias": "pawsx"
					},
					"qa4mre": {
						"acc,none": 0.14716312056737588,
						"acc_norm,none": 0.18262411347517732,
						"acc_norm_stderr,none": 0.01624501221316162,
						"acc_stderr,none": 0.01493662217402647,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5438754117999401,
						"acc_stderr,none": 0.0028497926862090625,
						"alias": "sycophancy"
					},
					"xcopa": {
						"acc,none": 0.5176363636363637,
						"acc_stderr,none": 0.006739757879616855,
						"alias": "xcopa"
					},
					"xstorycloze": {
						"acc,none": 0.47488117441790506,
						"acc_stderr,none": 0.003872459803504528,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.5032591593616543,
						"acc_stderr,none": 0.007499652918045967,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.25366403607666294,
						"acc_norm,none": 0.2615558060879369,
						"acc_norm_stderr,none": 0.007365382078023482,
						"acc_stderr,none": 0.007273449212895264,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.33625,
						"acc_stderr,none": 0.008354126064223918,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.327,
						"acc_stderr,none": 0.014842213153411247,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.014922019523732956,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3458333333333333,
						"acc_stderr,none": 0.013736245342311016,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.1945392491467577,
						"acc_norm,none": 0.22184300341296928,
						"acc_norm_stderr,none": 0.012141659068147882,
						"acc_stderr,none": 0.011567709174648727,
						"alias": " - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.2828282828282828,
						"acc_norm,none": 0.28114478114478114,
						"acc_norm_stderr,none": 0.009224735470286996,
						"acc_stderr,none": 0.009241472775328222,
						"alias": " - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.5762089552238806,
						"acc_stderr,none": 0.0018018618909635073,
						"alias": "blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.015760691590136384,
						"alias": " - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.015356947477797572,
						"alias": " - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.015120172605483696,
						"alias": " - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.015768596914394382,
						"alias": " - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281569,
						"alias": " - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814895,
						"alias": " - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.015815471195292686,
						"alias": " - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.01460648312734276,
						"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.363,
						"acc_stderr,none": 0.015213890444671281,
						"alias": " - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795023,
						"alias": " - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.627,
						"acc_stderr,none": 0.01530049362292281,
						"alias": " - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.618,
						"acc_stderr,none": 0.015372453034968524,
						"alias": " - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.015110404505648663,
						"alias": " - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": " - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.015149042659306625,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405747,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897071,
						"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797577,
						"alias": " - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405182,
						"alias": " - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348642,
						"alias": " - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302702,
						"alias": " - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.01401329270272948,
						"alias": " - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405754,
						"alias": " - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799485,
						"alias": " - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932575,
						"alias": " - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.01581774956184357,
						"alias": " - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405182,
						"alias": " - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.479,
						"acc_stderr,none": 0.015805341148131296,
						"alias": " - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015104,
						"alias": " - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.015817749561843574,
						"alias": " - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.015120172605483689,
						"alias": " - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577797,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.598,
						"acc_stderr,none": 0.015512467135715073,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.014899597242811476,
						"alias": " - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.015788865959539003,
						"alias": " - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.197,
						"acc_stderr,none": 0.012583693787968126,
						"alias": " - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.228,
						"acc_stderr,none": 0.013273740700804481,
						"alias": " - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.287,
						"acc_stderr,none": 0.014312087053809965,
						"alias": " - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.467,
						"acc_stderr,none": 0.01578480789113878,
						"alias": " - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.408,
						"acc_stderr,none": 0.015549205052920675,
						"alias": " - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950424,
						"alias": " - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.015387682761897066,
						"alias": " - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737237,
						"alias": " - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348633,
						"alias": " - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": " - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177429,
						"alias": " - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814902,
						"alias": " - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.015387682761897068,
						"alias": " - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307992,
						"alias": " - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.485,
						"acc_stderr,none": 0.015812179641814892,
						"alias": " - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652706,
						"alias": " - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.01567232023733621,
						"alias": " - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": " - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.01479492784334863,
						"alias": " - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.623,
						"acc_stderr,none": 0.015333170125779859,
						"alias": " - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.543,
						"acc_stderr,none": 0.01576069159013638,
						"alias": " - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.513,
						"acc_stderr,none": 0.015813952101896633,
						"alias": " - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.523,
						"acc_stderr,none": 0.0158025542467261,
						"alias": " - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.012155153135511949,
						"alias": " - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598264,
						"alias": " - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.01188449583454166,
						"alias": " - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.011539894677559571,
						"alias": " - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617328,
						"alias": " - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.01218143617917792,
						"alias": " - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.253,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.231,
						"acc_stderr,none": 0.01333479721693644,
						"alias": " - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6214067278287462,
						"acc_stderr,none": 0.008483341718024479,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4642857142857143,
						"acc_stderr,none": 0.06724777654937658,
						"alias": "cb",
						"f1,none": 0.44487427466150864,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2555720653789004,
						"acc_norm,none": 0.2555720653789004,
						"acc_norm_stderr,none": 0.011832134124595545,
						"acc_stderr,none": 0.011832134124595545,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.06818181818181816,
						"acc_stderr,none": 0.06818181818181816,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.0687296045180637,
						"acc_stderr,none": 0.0687296045180637,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.052486388108147805,
						"acc_stderr,none": 0.052486388108147805,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.13513513513513514,
						"acc_norm,none": 0.13513513513513514,
						"acc_norm_stderr,none": 0.05697797585888969,
						"acc_stderr,none": 0.05697797585888969,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.09523809523809523,
						"acc_norm,none": 0.09523809523809523,
						"acc_norm_stderr,none": 0.06563832739090582,
						"acc_stderr,none": 0.06563832739090582,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.11180339887498948,
						"acc_stderr,none": 0.11180339887498948,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.13793103448275862,
						"acc_norm,none": 0.13793103448275862,
						"acc_norm_stderr,none": 0.06516628844986677,
						"acc_stderr,none": 0.06516628844986677,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.06527647182968213,
						"acc_stderr,none": 0.06527647182968213,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595453,
						"acc_stderr,none": 0.08534681648595453,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.0967741935483871,
						"acc_norm,none": 0.0967741935483871,
						"acc_norm_stderr,none": 0.053978066228004884,
						"acc_stderr,none": 0.053978066228004884,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.15075567228888181,
						"acc_stderr,none": 0.15075567228888181,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.06273323266748675,
						"acc_stderr,none": 0.06273323266748675,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.05050762722761052,
						"acc_stderr,none": 0.05050762722761052,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.12051692101036454,
						"acc_stderr,none": 0.12051692101036454,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.15217391304347827,
						"acc_norm,none": 0.15217391304347827,
						"acc_norm_stderr,none": 0.053544791889597564,
						"acc_stderr,none": 0.053544791889597564,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.23743740286651702,
						"acc_norm,none": 0.23743740286651702,
						"acc_norm_stderr,none": 0.003955069878474568,
						"acc_stderr,none": 0.003955069878474568,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.03220965704514524,
						"acc_stderr,none": 0.03220965704514524,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566343,
						"acc_stderr,none": 0.03273897454566343,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.20606060606060606,
						"acc_norm,none": 0.20606060606060606,
						"acc_norm_stderr,none": 0.03158415324047708,
						"acc_stderr,none": 0.03158415324047708,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.22488038277511962,
						"acc_norm,none": 0.22488038277511962,
						"acc_norm_stderr,none": 0.028948661140327035,
						"acc_stderr,none": 0.028948661140327035,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.2375,
						"acc_norm_stderr,none": 0.03374839851779223,
						"acc_stderr,none": 0.03374839851779223,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.16793893129770993,
						"acc_norm,none": 0.16793893129770993,
						"acc_norm_stderr,none": 0.032785485373431386,
						"acc_stderr,none": 0.032785485373431386,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.037626074966240076,
						"acc_stderr,none": 0.037626074966240076,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.16822429906542055,
						"acc_norm,none": 0.16822429906542055,
						"acc_norm_stderr,none": 0.03633243837141833,
						"acc_stderr,none": 0.03633243837141833,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.024254090252458043,
						"acc_stderr,none": 0.024254090252458043,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.21568627450980393,
						"acc_norm,none": 0.21568627450980393,
						"acc_norm_stderr,none": 0.028867431449849313,
						"acc_stderr,none": 0.028867431449849313,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.22905027932960895,
						"acc_norm,none": 0.22905027932960895,
						"acc_norm_stderr,none": 0.031496945533078094,
						"acc_stderr,none": 0.031496945533078094,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.20253164556962025,
						"acc_norm,none": 0.20253164556962025,
						"acc_norm_stderr,none": 0.026160568246601443,
						"acc_stderr,none": 0.026160568246601443,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439375,
						"acc_stderr,none": 0.04396093377439375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.19626168224299065,
						"acc_norm,none": 0.19626168224299065,
						"acc_norm_stderr,none": 0.038576441428227824,
						"acc_stderr,none": 0.038576441428227824,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.03889722288318549,
						"acc_stderr,none": 0.03889722288318549,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.1574074074074074,
						"acc_norm,none": 0.1574074074074074,
						"acc_norm_stderr,none": 0.03520703990517965,
						"acc_stderr,none": 0.03520703990517965,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.03431317581537583,
						"acc_stderr,none": 0.03431317581537583,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936712,
						"acc_stderr,none": 0.04022559246936712,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.27472527472527475,
						"acc_norm,none": 0.27472527472527475,
						"acc_norm_stderr,none": 0.027065504564389532,
						"acc_stderr,none": 0.027065504564389532,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.03019028245350195,
						"acc_stderr,none": 0.03019028245350195,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2046783625730994,
						"acc_norm,none": 0.2046783625730994,
						"acc_norm_stderr,none": 0.030944459778533228,
						"acc_stderr,none": 0.030944459778533228,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067566,
						"acc_stderr,none": 0.035589261576067566,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.20863309352517986,
						"acc_norm,none": 0.20863309352517986,
						"acc_norm_stderr,none": 0.03458923827478227,
						"acc_stderr,none": 0.03458923827478227,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.20754716981132076,
						"acc_norm,none": 0.20754716981132076,
						"acc_norm_stderr,none": 0.032263878587129174,
						"acc_stderr,none": 0.032263878587129174,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.27607361963190186,
						"acc_norm,none": 0.27607361963190186,
						"acc_norm_stderr,none": 0.0351238528370505,
						"acc_stderr,none": 0.0351238528370505,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.27380952380952384,
						"acc_norm,none": 0.27380952380952384,
						"acc_norm_stderr,none": 0.028145741115683843,
						"acc_stderr,none": 0.028145741115683843,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.031156269519646836,
						"acc_stderr,none": 0.031156269519646836,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.21428571428571427,
						"acc_norm,none": 0.21428571428571427,
						"acc_norm_stderr,none": 0.026653531596715487,
						"acc_stderr,none": 0.026653531596715487,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.027256850838819964,
						"acc_stderr,none": 0.027256850838819964,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23863636363636365,
						"acc_norm,none": 0.23863636363636365,
						"acc_norm_stderr,none": 0.03222147017899509,
						"acc_stderr,none": 0.03222147017899509,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594347,
						"acc_stderr,none": 0.03518627932594347,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.20710059171597633,
						"acc_norm,none": 0.20710059171597633,
						"acc_norm_stderr,none": 0.031264038430270896,
						"acc_stderr,none": 0.031264038430270896,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.0374425492857706,
						"acc_stderr,none": 0.0374425492857706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.039803298549204336,
						"acc_stderr,none": 0.039803298549204336,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03873144730600104,
						"acc_stderr,none": 0.03873144730600104,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.21428571428571427,
						"acc_norm,none": 0.21428571428571427,
						"acc_norm_stderr,none": 0.03670066451047182,
						"acc_stderr,none": 0.03670066451047182,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.029488391230979388,
						"acc_stderr,none": 0.029488391230979388,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890494,
						"acc_stderr,none": 0.03361101403890494,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.22871046228710462,
						"acc_norm,none": 0.22871046228710462,
						"acc_norm_stderr,none": 0.020742438335834938,
						"acc_stderr,none": 0.020742438335834938,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.22429906542056074,
						"acc_norm,none": 0.22429906542056074,
						"acc_norm_stderr,none": 0.02858058327333863,
						"acc_stderr,none": 0.02858058327333863,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.04119323030208567,
						"acc_stderr,none": 0.04119323030208567,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.27049180327868855,
						"acc_norm,none": 0.27049180327868855,
						"acc_norm_stderr,none": 0.04038308168357441,
						"acc_stderr,none": 0.04038308168357441,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467188,
						"acc_stderr,none": 0.02985642316467188,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.03107384484365942,
						"acc_stderr,none": 0.03107384484365942,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.21164021164021163,
						"acc_norm,none": 0.21164021164021163,
						"acc_norm_stderr,none": 0.0297908161324195,
						"acc_stderr,none": 0.0297908161324195,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.037773969483114886,
						"acc_stderr,none": 0.037773969483114886,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.18857142857142858,
						"acc_norm,none": 0.18857142857142858,
						"acc_norm_stderr,none": 0.029654354112075402,
						"acc_stderr,none": 0.029654354112075402,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.23696682464454977,
						"acc_norm,none": 0.23696682464454977,
						"acc_norm_stderr,none": 0.029343089448667727,
						"acc_stderr,none": 0.029343089448667727,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24202127659574468,
						"acc_norm,none": 0.24202127659574468,
						"acc_norm_stderr,none": 0.022117683921586997,
						"acc_stderr,none": 0.022117683921586997,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2629310344827586,
						"acc_norm,none": 0.2629310344827586,
						"acc_norm_stderr,none": 0.028964697544540167,
						"acc_stderr,none": 0.028964697544540167,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.033291151121447815,
						"acc_stderr,none": 0.033291151121447815,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.036333844140734664,
						"acc_stderr,none": 0.036333844140734664,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26548672566371684,
						"acc_norm,none": 0.26548672566371684,
						"acc_norm_stderr,none": 0.02943946890825876,
						"acc_stderr,none": 0.02943946890825876,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.03192271569548299,
						"acc_stderr,none": 0.03192271569548299,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.035714285714285685,
						"acc_stderr,none": 0.035714285714285685,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": "cola",
						"mcc,none": -0.023156201795729782,
						"mcc_stderr,none": 0.02703057769899001
					},
					"copa": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.04793724854411018,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 13.177251043530113,
						"likelihood_diff_stderr,none": 0.17239541498920263,
						"pct_stereotype,none": 0.44991055456171736,
						"pct_stereotype_stderr,none": 0.006025660934263797
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 9.292486583184258,
						"likelihood_diff_stderr,none": 0.2366791018268126,
						"pct_stereotype,none": 0.46332737030411447,
						"pct_stereotype_stderr,none": 0.012180404031943275
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 7.93956043956044,
						"likelihood_diff_stderr,none": 0.8346083712642237,
						"pct_stereotype,none": 0.5494505494505495,
						"pct_stereotype_stderr,none": 0.05244623100101224
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 14.636363636363637,
						"likelihood_diff_stderr,none": 4.6397937400722045,
						"pct_stereotype,none": 0.5454545454545454,
						"pct_stereotype_stderr,none": 0.1574591643244434
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 12.215384615384615,
						"likelihood_diff_stderr,none": 1.367943836652911,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.06231481440776789
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 8.6796875,
						"likelihood_diff_stderr,none": 0.5659089316956081,
						"pct_stereotype,none": 0.4625,
						"pct_stereotype_stderr,none": 0.02791577963000663
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 9.50925925925926,
						"likelihood_diff_stderr,none": 0.6782660398958518,
						"pct_stereotype,none": 0.5509259259259259,
						"pct_stereotype_stderr,none": 0.03392238405321617
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 8.409722222222221,
						"likelihood_diff_stderr,none": 1.0483856725819214,
						"pct_stereotype,none": 0.4027777777777778,
						"pct_stereotype_stderr,none": 0.05820650942569532
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 9.266732283464567,
						"likelihood_diff_stderr,none": 0.4272258062356487,
						"pct_stereotype,none": 0.41929133858267714,
						"pct_stereotype_stderr,none": 0.021914578288494874
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 9.405405405405405,
						"likelihood_diff_stderr,none": 1.042788768536943,
						"pct_stereotype,none": 0.4594594594594595,
						"pct_stereotype_stderr,none": 0.04751616610765046
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 10.155913978494624,
						"likelihood_diff_stderr,none": 0.8672384505277926,
						"pct_stereotype,none": 0.4946236559139785,
						"pct_stereotype_stderr,none": 0.05212558986469174
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 9.33157894736842,
						"likelihood_diff_stderr,none": 0.6220807183273035,
						"pct_stereotype,none": 0.4473684210526316,
						"pct_stereotype_stderr,none": 0.036167593207172444
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 17.06201550387597,
						"likelihood_diff_stderr,none": 0.42952231320384837,
						"pct_stereotype,none": 0.4364937388193202,
						"pct_stereotype_stderr,none": 0.01211438509572501
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 15.355555555555556,
						"likelihood_diff_stderr,none": 1.706860732698114,
						"pct_stereotype,none": 0.32222222222222224,
						"pct_stereotype_stderr,none": 0.04953662380574454
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 17.384615384615383,
						"likelihood_diff_stderr,none": 4.691493203795816,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 23.12878787878788,
						"likelihood_diff_stderr,none": 2.7068141288199103,
						"pct_stereotype,none": 0.42424242424242425,
						"pct_stereotype_stderr,none": 0.06130137276858362
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 16.91588785046729,
						"likelihood_diff_stderr,none": 0.8211021922302403,
						"pct_stereotype,none": 0.4423676012461059,
						"pct_stereotype_stderr,none": 0.027764551737212474
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 20.136363636363637,
						"likelihood_diff_stderr,none": 1.0529316195853855,
						"pct_stereotype,none": 0.31225296442687744,
						"pct_stereotype_stderr,none": 0.02919223713357907
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 15.5,
						"likelihood_diff_stderr,none": 2.0766088658788484,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.05785537103478462
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 13.584782608695653,
						"likelihood_diff_stderr,none": 0.7920677884151252,
						"pct_stereotype,none": 0.3717391304347826,
						"pct_stereotype_stderr,none": 0.022557075965613523
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 19.769565217391303,
						"likelihood_diff_stderr,none": 2.376590053115829,
						"pct_stereotype,none": 0.3826086956521739,
						"pct_stereotype_stderr,none": 0.04552031372871532
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 24.912087912087912,
						"likelihood_diff_stderr,none": 1.7170159217713166,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.0410844685503588
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 15.553571428571429,
						"likelihood_diff_stderr,none": 1.143420047957058,
						"pct_stereotype,none": 0.5816326530612245,
						"pct_stereotype_stderr,none": 0.035325309438765606
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,flexible-extract": 0.002274450341167551,
						"exact_match,strict-match": 0.0,
						"exact_match_stderr,flexible-extract": 0.0013121578148674068,
						"exact_match_stderr,strict-match": 0.0
					},
					"hellaswag": {
						"acc,none": 0.2592113124875523,
						"acc_norm,none": 0.27394941246763593,
						"acc_norm_stderr,none": 0.004450718673552655,
						"acc_stderr,none": 0.0043730622833765016,
						"alias": "hellaswag"
					},
					"kobest": {
						"acc,none": 0.4709493532120149,
						"acc_stderr,none": 0.007333438274771031,
						"alias": "kobest",
						"f1,none": 0.37450243633981517,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701182,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.474,
						"acc_stderr,none": 0.015797897758042766,
						"alias": " - kobest_copa",
						"f1,none": 0.472099558410277,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.292,
						"acc_norm,none": 0.364,
						"acc_norm_stderr,none": 0.021539170637317695,
						"acc_stderr,none": 0.020354375480530075,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.291134910888595,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5239294710327456,
						"acc_stderr,none": 0.02509715366855094,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5234941098021783,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada_multilingual": {
						"acc,none": 0.004346982340384243,
						"acc_stderr,none": 0.0004098404757155364,
						"alias": "lambada_multilingual",
						"perplexity,none": 95048329591.87683,
						"perplexity_stderr,none": 11493246193.665659
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.005627789637104599,
						"acc_stderr,none": 0.0010422106094106795,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 31357036723.78422,
						"perplexity_stderr,none": 5804900538.3750925
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.0038812342324859306,
						"acc_stderr,none": 0.0008662685754551872,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5907735293.930233,
						"perplexity_stderr,none": 898791642.619992
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.003687172520861634,
						"acc_stderr,none": 0.000844416406909331,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 342825859691.37396,
						"perplexity_stderr,none": 55110781630.217094
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.005627789637104599,
						"acc_stderr,none": 0.0010422106094106732,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 5549227132.785457,
						"perplexity_stderr,none": 862756699.5458934
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.002910925674364448,
						"acc_stderr,none": 0.0007505758899360263,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 89601789117.51031,
						"perplexity_stderr,none": 15163126459.008951
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.0,
						"exact_match_stderr,get-answer": 0.0
					},
					"logiqa": {
						"acc,none": 0.23348694316436253,
						"acc_norm,none": 0.25960061443932414,
						"acc_norm_stderr,none": 0.01719607000818003,
						"acc_stderr,none": 0.016593362460570887,
						"alias": "logiqa"
					},
					"logiqa2": {
						"acc,none": 0.20229007633587787,
						"acc_norm,none": 0.2627226463104326,
						"acc_norm_stderr,none": 0.011103914513421435,
						"acc_stderr,none": 0.010134951949549276,
						"alias": "logiqa2"
					},
					"mc_taco": {
						"acc,none": 0.4130480830332557,
						"acc_stderr,none": 0.00506748613754838,
						"alias": "mc_taco",
						"f1,none": 0.4775641025641026,
						"f1_stderr,none": 0.005993645084626725
					},
					"medmcqa": {
						"acc,none": 0.2204159693999522,
						"acc_norm,none": 0.2204159693999522,
						"acc_norm_stderr,none": 0.006410043564341527,
						"acc_stderr,none": 0.006410043564341527,
						"alias": " - medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.25294579732914374,
						"acc_norm,none": 0.25294579732914374,
						"acc_norm_stderr,none": 0.012188386992159895,
						"acc_stderr,none": 0.012188386992159895,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2469733656174334,
						"acc_stderr,none": 0.003636673741729959,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.03749850709174021,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.036906779861372814,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.27169811320754716,
						"acc_stderr,none": 0.027377706624670713,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03685651095897532,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.34104046242774566,
						"acc_stderr,none": 0.036146654241808254,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23829787234042554,
						"acc_stderr,none": 0.02785125297388978,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.0404933929774814,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2689655172413793,
						"acc_stderr,none": 0.03695183311650232,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.22486772486772486,
						"acc_stderr,none": 0.021502096078229147,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.04240799327574923,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.23548387096774193,
						"acc_stderr,none": 0.024137632429337717,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.030315099285617732,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.21818181818181817,
						"acc_stderr,none": 0.03225078108306289,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.30808080808080807,
						"acc_stderr,none": 0.032894773300986155,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.23834196891191708,
						"acc_stderr,none": 0.030748905363909895,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3230769230769231,
						"acc_stderr,none": 0.02371088850197056,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712163,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2815126050420168,
						"acc_stderr,none": 0.029213549414372163,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2251655629139073,
						"acc_stderr,none": 0.03410435282008937,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.27889908256880735,
						"acc_stderr,none": 0.01922746887646352,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.02896370257079102,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.030190282453501967,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.26582278481012656,
						"acc_stderr,none": 0.028756799629658335,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.20179372197309417,
						"acc_stderr,none": 0.026936111912802273,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2452709883103082,
						"acc_stderr,none": 0.0062727475783044976,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2066115702479339,
						"acc_stderr,none": 0.03695980128098825,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.21296296296296297,
						"acc_stderr,none": 0.03957835471980978,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22699386503067484,
						"acc_stderr,none": 0.03291099578615768,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.22321428571428573,
						"acc_stderr,none": 0.039523019677025116,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2912621359223301,
						"acc_stderr,none": 0.044986763205729224,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2094017094017094,
						"acc_stderr,none": 0.026655699653922744,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2260536398467433,
						"acc_stderr,none": 0.014957458504335837,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.23699421965317918,
						"acc_stderr,none": 0.02289408248992599,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2636871508379888,
						"acc_stderr,none": 0.014736926383761997,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.21241830065359477,
						"acc_stderr,none": 0.02342037547829613,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.23688445445767622,
						"acc_stderr,none": 0.007619024706686291,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2282958199356913,
						"acc_stderr,none": 0.023839303311398222,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.22839506172839505,
						"acc_stderr,none": 0.023358211840626267,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25177304964539005,
						"acc_stderr,none": 0.0258921511567094,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24511082138200782,
						"acc_stderr,none": 0.010986307870045507,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22426470588235295,
						"acc_stderr,none": 0.025336848563332348,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.016819028375736393,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.23636363636363636,
						"acc_stderr,none": 0.04069306319721376,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.20408163265306123,
						"acc_stderr,none": 0.02580128347509051,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.25804354891127723,
						"acc_stderr,none": 0.007871590459686583,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22388059701492538,
						"acc_stderr,none": 0.029475250236017193,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24865207738661593,
						"acc_stderr,none": 0.007701731877240684,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2289156626506024,
						"acc_stderr,none": 0.03270745277352477,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03188578017686397,
						"alias": "  - world_religions"
					},
					"mrpc": {
						"acc,none": 0.6813725490196079,
						"acc_stderr,none": 0.023095996571841474,
						"alias": "mrpc",
						"f1,none": 0.8104956268221575,
						"f1_stderr,none": 0.016356948758338184
					},
					"multimedqa": {
						"acc,none": 0.25691980127750175,
						"acc_stderr,none": 0.005109843318904429,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751527,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.5557750188111341,
						"mrr_stderr,none": 0.00995958641360277,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.43792325056433407,
						"r@2_stderr,none": 0.016677278334075053
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.5558690744920973,
						"mrr_stderr,none": 0.009913979679350315,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.4717832957110609,
						"r@2_stderr,none": 0.01678053141516135
					},
					"openbookqa": {
						"acc,none": 0.156,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.01982771485958758,
						"acc_stderr,none": 0.016243636028391097,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.011177761232603323,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5345,
						"acc_stderr,none": 0.011156482803925174,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4905,
						"acc_stderr,none": 0.011181117282805231,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.01116681910502999,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.011158752568250663,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5345,
						"acc_stderr,none": 0.01115648280392517,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525782,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5217142857142857,
						"acc_stderr,none": 0.0042185680657789324,
						"alias": "pawsx"
					},
					"prost": {
						"acc,none": 0.21925704526046114,
						"acc_norm,none": 0.3024658411614005,
						"acc_norm_stderr,none": 0.003355784462147765,
						"acc_stderr,none": 0.0030227620898675557,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227011,
						"alias": "pubmedqa"
					},
					"qa4mre": {
						"acc,none": 0.14716312056737588,
						"acc_norm,none": 0.18262411347517732,
						"acc_norm_stderr,none": 0.01624501221316162,
						"acc_stderr,none": 0.01493662217402647,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.125,
						"acc_norm,none": 0.21666666666666667,
						"acc_norm_stderr,none": 0.03776555522604266,
						"acc_stderr,none": 0.030316953129541618,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.13125,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.02677925573528599,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.16549295774647887,
						"acc_norm,none": 0.14788732394366197,
						"acc_norm_stderr,none": 0.02110186160297503,
						"acc_stderr,none": 0.02209080510658783,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877456,
						"alias": "qnli"
					},
					"race": {
						"acc,none": 0.22009569377990432,
						"acc_stderr,none": 0.012822602595318807,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5090252707581228,
						"acc_stderr,none": 0.030091559826331334,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.319,
						"acc_norm,none": 0.316,
						"acc_norm_stderr,none": 0.014709193056057127,
						"acc_stderr,none": 0.01474640486547349,
						"alias": "sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5090252707581228,
						"acc_stderr,none": 0.030091559826331334,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.4896788990825688,
						"acc_stderr,none": 0.01693824383857661,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.2817654703588923,
						"acc_norm,none": 0.29461161651504547,
						"acc_norm_stderr,none": 0.0032230705159190507,
						"acc_stderr,none": 0.0031805904270671257,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5438754117999401,
						"acc_stderr,none": 0.0028497926862090625,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.005004255426437999,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6348434174521131,
						"acc_stderr,none": 0.004847327040906145,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.4988235294117647,
						"acc_stderr,none": 0.004950966710795893,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 6.0626892278969535,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 66.84228872104099,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 5746081807.172335,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.4980268350434096,
						"acc_stderr,none": 0.014052376259225629,
						"alias": "winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": "wsc"
					},
					"wsc273": {
						"acc,none": 0.48717948717948717,
						"acc_stderr,none": 0.03030698536562609,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5176363636363637,
						"acc_stderr,none": 0.006739757879616855,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928025,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269962,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502843,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.022371610982580396,
						"alias": " - xcopa_zh"
					},
					"xstorycloze": {
						"acc,none": 0.47488117441790506,
						"acc_stderr,none": 0.003872459803504528,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4599602911978822,
						"acc_stderr,none": 0.01282580237008399,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.4818001323626737,
						"acc_stderr,none": 0.012858598401831848,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.4818001323626737,
						"acc_stderr,none": 0.012858598401831848,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.49636002647253474,
						"acc_stderr,none": 0.01286678434828923,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.47650562541363334,
						"acc_stderr,none": 0.012852912530051752,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4599602911978822,
						"acc_stderr,none": 0.012825802370083992,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.456651224354732,
						"acc_stderr,none": 0.01281867645248195,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4698874917273329,
						"acc_stderr,none": 0.012843769248432169,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.45929847782925215,
						"acc_stderr,none": 0.01282442273962558,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.01286238758665008,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.46790205162144277,
						"acc_stderr,none": 0.012840584503982028,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.5032591593616543,
						"acc_stderr,none": 0.007499652918045967,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.5079569892473118,
						"acc_stderr,none": 0.010370434240417212,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.05511548370029596,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5099061522419187,
						"acc_stderr,none": 0.016151095936358936,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.49809885931558934,
						"acc_stderr,none": 0.030889879865535985,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.49206349206349204,
						"acc_stderr,none": 0.02821307754781505,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.48412698412698413,
						"acc_stderr,none": 0.02228266125886959,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "google/gemma-7b"
	},
	"google/gemma-7b-it": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.2900225479143179,
						"acc_norm,none": 0.29988726042841035,
						"acc_norm_stderr,none": 0.0076571636813316316,
						"acc_stderr,none": 0.00753505815447761,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.33625,
						"acc_stderr,none": 0.008355257041054116,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.599865671641791,
						"acc_stderr,none": 0.0016959604114560741,
						"alias": "blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2303120356612184,
						"acc_norm,none": 0.2303120356612184,
						"acc_norm_stderr,none": 0.011517420398032593,
						"acc_stderr,none": 0.011517420398032593,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2524607149024348,
						"acc_norm,none": 0.2524607149024348,
						"acc_norm_stderr,none": 0.004046181414671486,
						"acc_stderr,none": 0.004046181414671486,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 14.49716756112105,
						"likelihood_diff_stderr,none": 0.1840973247991192,
						"pct_stereotype,none": 0.4749552772808587,
						"pct_stereotype_stderr,none": 0.006046608287716455
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"kobest": {
						"acc,none": 0.47138785354089013,
						"acc_stderr,none": 0.007358232995010896,
						"alias": "kobest",
						"f1,none": 0.37083123331466056,
						"f1_stderr,none": "N/A"
					},
					"lambada_multilingual": {
						"acc,none": 0.10188239860275568,
						"acc_stderr,none": 0.0018602710248577913,
						"alias": "lambada_multilingual",
						"perplexity,none": 2874198.6132353013,
						"perplexity_stderr,none": 231915.3091997584
					},
					"mmlu": {
						"acc,none": 0.25117504628970233,
						"acc_stderr,none": 0.0036561249199233894,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25334750265674816,
						"acc_stderr,none": 0.006332763902318966,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2587705181847441,
						"acc_stderr,none": 0.007837914850158799,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.254793630159246,
						"acc_stderr,none": 0.007865411918359159,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2369172216936251,
						"acc_stderr,none": 0.007566553902040756,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.29694819020581975,
						"acc_stderr,none": 0.005375895101184015,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5152857142857142,
						"acc_stderr,none": 0.004215109592948072,
						"alias": "pawsx"
					},
					"qa4mre": {
						"acc,none": 0.2624113475177305,
						"acc_norm,none": 0.3067375886524823,
						"acc_norm_stderr,none": 0.01943996038210285,
						"acc_stderr,none": 0.018556031037772654,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6015107650327776,
						"acc_stderr,none": 0.0027951016290707546,
						"alias": "sycophancy"
					},
					"xcopa": {
						"acc,none": 0.5174545454545455,
						"acc_stderr,none": 0.006742013363910066,
						"alias": "xcopa"
					},
					"xstorycloze": {
						"acc,none": 0.4956380482522111,
						"acc_stderr,none": 0.0038728188326428896,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.5185434929197572,
						"acc_stderr,none": 0.007490970617248932,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.2900225479143179,
						"acc_norm,none": 0.29988726042841035,
						"acc_norm_stderr,none": 0.0076571636813316316,
						"acc_stderr,none": 0.00753505815447761,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.33625,
						"acc_stderr,none": 0.008355257041054116,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.337,
						"acc_stderr,none": 0.01495508791865362,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.01492201952373296,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3375,
						"acc_stderr,none": 0.013655897185463667,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.19368600682593856,
						"acc_norm,none": 0.2354948805460751,
						"acc_norm_stderr,none": 0.012399451855004772,
						"acc_stderr,none": 0.01154842540997854,
						"alias": " - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.33754208754208753,
						"acc_norm,none": 0.33164983164983164,
						"acc_norm_stderr,none": 0.009660733780923969,
						"acc_stderr,none": 0.009703117820790301,
						"alias": " - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0026030368763557484,
						"acc_stderr,none": 0.001061531641109425,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.599865671641791,
						"acc_stderr,none": 0.0016959604114560741,
						"alias": "blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.01567663091218133,
						"alias": " - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411249,
						"alias": " - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888923,
						"alias": " - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.01524061272640575,
						"alias": " - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240803,
						"alias": " - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.447,
						"acc_stderr,none": 0.01573017604600907,
						"alias": " - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.0155372264386346,
						"alias": " - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.658,
						"acc_stderr,none": 0.015008706182121731,
						"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921665,
						"alias": " - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.01564978964446221,
						"alias": " - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.01570498795436179,
						"alias": " - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307987,
						"alias": " - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.01491084616422986,
						"alias": " - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.01568087656637506,
						"alias": " - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.01581879370351089,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.015204840912919501,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.577,
						"acc_stderr,none": 0.015630589090476345,
						"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.015222868840522022,
						"alias": " - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.637,
						"acc_stderr,none": 0.015213890444671278,
						"alias": " - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729486,
						"alias": " - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042762,
						"alias": " - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366658,
						"alias": " - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750641,
						"alias": " - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481753,
						"alias": " - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535257,
						"alias": " - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348644,
						"alias": " - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.01551875741906654,
						"alias": " - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.015749255189977596,
						"alias": " - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.589,
						"acc_stderr,none": 0.015566673418599276,
						"alias": " - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.341,
						"acc_stderr,none": 0.014998131348402702,
						"alias": " - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.01581774956184357,
						"alias": " - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.015431725053866608,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405182,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659978,
						"alias": " - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.01581879370351089,
						"alias": " - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.066,
						"acc_stderr,none": 0.007855297938697567,
						"alias": " - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.095,
						"acc_stderr,none": 0.00927691010310331,
						"alias": " - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.221,
						"acc_stderr,none": 0.01312750285969624,
						"alias": " - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.023,
						"acc_stderr,none": 0.004742730594656804,
						"alias": " - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.613,
						"acc_stderr,none": 0.015410011955493928,
						"alias": " - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.651,
						"acc_stderr,none": 0.015080663991563098,
						"alias": " - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.687,
						"acc_stderr,none": 0.014671272822977885,
						"alias": " - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905692,
						"alias": " - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298284,
						"alias": " - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": " - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.01571250721186421,
						"alias": " - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.015466551464829344,
						"alias": " - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.014876872027456732,
						"alias": " - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.014987482264363935,
						"alias": " - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.01524937846417175,
						"alias": " - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475286,
						"alias": " - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539003,
						"alias": " - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.346,
						"acc_stderr,none": 0.015050266127564443,
						"alias": " - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.015605111967541944,
						"alias": " - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.014205696104091505,
						"alias": " - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249938,
						"alias": " - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498323,
						"alias": " - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.015560917136921655,
						"alias": " - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235239,
						"alias": " - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248118,
						"alias": " - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962802996,
						"alias": " - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706824,
						"alias": " - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178346,
						"alias": " - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.189,
						"acc_stderr,none": 0.012386784588117705,
						"alias": " - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.056,
						"acc_stderr,none": 0.007274401481697044,
						"alias": " - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.5944954128440367,
						"acc_stderr,none": 0.00858745905544161,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4107142857142857,
						"acc_stderr,none": 0.06633634150359538,
						"alias": "cb",
						"f1,none": 0.1940928270042194,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2303120356612184,
						"acc_norm,none": 0.2303120356612184,
						"acc_norm_stderr,none": 0.011517420398032593,
						"acc_stderr,none": 0.011517420398032593,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.0713860923457608,
						"acc_stderr,none": 0.0713860923457608,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217941,
						"acc_stderr,none": 0.07988892740217941,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.0,
						"acc_norm,none": 0.0,
						"acc_norm_stderr,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387369,
						"acc_stderr,none": 0.11236664374387369,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915692,
						"acc_stderr,none": 0.08742975048915692,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453994,
						"acc_stderr,none": 0.06957698714453994,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.061487546190134544,
						"acc_stderr,none": 0.061487546190134544,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2524607149024348,
						"acc_norm,none": 0.2524607149024348,
						"acc_norm_stderr,none": 0.004046181414671486,
						"acc_stderr,none": 0.004046181414671486,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03250593287417369,
						"acc_stderr,none": 0.03250593287417369,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.034949590161775394,
						"acc_stderr,none": 0.034949590161775394,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24401913875598086,
						"acc_norm,none": 0.24401913875598086,
						"acc_norm_stderr,none": 0.029780753228706103,
						"acc_stderr,none": 0.029780753228706103,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2476780185758514,
						"acc_norm,none": 0.2476780185758514,
						"acc_norm_stderr,none": 0.024055681892974842,
						"acc_stderr,none": 0.024055681892974842,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2569832402234637,
						"acc_norm,none": 0.2569832402234637,
						"acc_norm_stderr,none": 0.03275229252356166,
						"acc_stderr,none": 0.03275229252356166,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.028304657943035296,
						"acc_stderr,none": 0.028304657943035296,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.04522350077382029,
						"acc_stderr,none": 0.04522350077382029,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980981,
						"acc_stderr,none": 0.03957835471980981,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055041,
						"acc_stderr,none": 0.04232473532055041,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2271062271062271,
						"acc_norm,none": 0.2271062271062271,
						"acc_norm_stderr,none": 0.025403290424595146,
						"acc_stderr,none": 0.025403290424595146,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24019607843137256,
						"acc_norm,none": 0.24019607843137256,
						"acc_norm_stderr,none": 0.02998373305591361,
						"acc_stderr,none": 0.02998373305591361,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.0330140594698725,
						"acc_stderr,none": 0.0330140594698725,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067587,
						"acc_stderr,none": 0.035589261576067587,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.0342292401764445,
						"acc_stderr,none": 0.0342292401764445,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.26993865030674846,
						"acc_norm,none": 0.26993865030674846,
						"acc_norm_stderr,none": 0.034878251684978906,
						"acc_stderr,none": 0.034878251684978906,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.03384836428157859,
						"acc_stderr,none": 0.03384836428157859,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.028271399816988535,
						"acc_stderr,none": 0.028271399816988535,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.030746300742124495,
						"acc_stderr,none": 0.030746300742124495,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2815126050420168,
						"acc_norm,none": 0.2815126050420168,
						"acc_norm_stderr,none": 0.029213549414372163,
						"acc_stderr,none": 0.029213549414372163,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.02975452853823327,
						"acc_stderr,none": 0.02975452853823327,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.037498507091740206,
						"acc_stderr,none": 0.037498507091740206,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.032976929254344596,
						"acc_stderr,none": 0.032976929254344596,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714262,
						"acc_stderr,none": 0.04025566684714262,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.034703982128145336,
						"acc_stderr,none": 0.034703982128145336,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928724,
						"acc_stderr,none": 0.039325376803928724,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25304136253041365,
						"acc_norm,none": 0.25304136253041365,
						"acc_norm_stderr,none": 0.021470991853398326,
						"acc_stderr,none": 0.021470991853398326,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.029387023754333132,
						"acc_stderr,none": 0.029387023754333132,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.03930879526823993,
						"acc_stderr,none": 0.03930879526823993,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798663,
						"acc_stderr,none": 0.03957756102798663,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467191,
						"acc_stderr,none": 0.02985642316467191,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03212157057535212,
						"acc_stderr,none": 0.03212157057535212,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871164,
						"acc_stderr,none": 0.03152480234871164,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.22413793103448276,
						"acc_norm,none": 0.22413793103448276,
						"acc_norm_stderr,none": 0.03888669370117824,
						"acc_stderr,none": 0.03888669370117824,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604899,
						"acc_stderr,none": 0.04176466758604899,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24571428571428572,
						"acc_norm,none": 0.24571428571428572,
						"acc_norm_stderr,none": 0.03263687142627841,
						"acc_stderr,none": 0.03263687142627841,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.030113040167767245,
						"acc_stderr,none": 0.030113040167767245,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2473404255319149,
						"acc_norm,none": 0.2473404255319149,
						"acc_norm_stderr,none": 0.022280822212812246,
						"acc_stderr,none": 0.022280822212812246,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.02881017350806387,
						"acc_stderr,none": 0.02881017350806387,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.037125378336148665,
						"acc_stderr,none": 0.037125378336148665,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24778761061946902,
						"acc_norm,none": 0.24778761061946902,
						"acc_norm_stderr,none": 0.028781854672921457,
						"acc_stderr,none": 0.028781854672921457,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019636,
						"acc_stderr,none": 0.033341501981019636,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.033870869961530825,
						"acc_stderr,none": 0.033870869961530825,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": "cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.04960449637488585,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 14.49716756112105,
						"likelihood_diff_stderr,none": 0.1840973247991192,
						"pct_stereotype,none": 0.4749552772808587,
						"pct_stereotype_stderr,none": 0.006046608287716455
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 11.652057245080501,
						"likelihood_diff_stderr,none": 0.33347369401984267,
						"pct_stereotype,none": 0.47942754919499103,
						"pct_stereotype_stderr,none": 0.012202956874643714
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 10.472527472527473,
						"likelihood_diff_stderr,none": 1.3843130084390212,
						"pct_stereotype,none": 0.4175824175824176,
						"pct_stereotype_stderr,none": 0.05198368783767557
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 15.090909090909092,
						"likelihood_diff_stderr,none": 6.807021168623073,
						"pct_stereotype,none": 0.5454545454545454,
						"pct_stereotype_stderr,none": 0.1574591643244434
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 16.015384615384615,
						"likelihood_diff_stderr,none": 2.021577391031319,
						"pct_stereotype,none": 0.47692307692307695,
						"pct_stereotype_stderr,none": 0.062433396464415106
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 9.78125,
						"likelihood_diff_stderr,none": 0.7069071692135962,
						"pct_stereotype,none": 0.49375,
						"pct_stereotype_stderr,none": 0.02799243838223232
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 13.280092592592593,
						"likelihood_diff_stderr,none": 1.0187233443525137,
						"pct_stereotype,none": 0.49074074074074076,
						"pct_stereotype_stderr,none": 0.034093869469927006
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 9.944444444444445,
						"likelihood_diff_stderr,none": 1.3290554744561571,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.058763966770846124
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 11.333661417322835,
						"likelihood_diff_stderr,none": 0.5775442325049999,
						"pct_stereotype,none": 0.4311023622047244,
						"pct_stereotype_stderr,none": 0.021993952705996092
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 13.103603603603604,
						"likelihood_diff_stderr,none": 1.5476080870287339,
						"pct_stereotype,none": 0.6126126126126126,
						"pct_stereotype_stderr,none": 0.0464482507235508
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 10.844086021505376,
						"likelihood_diff_stderr,none": 1.3019920925217743,
						"pct_stereotype,none": 0.5483870967741935,
						"pct_stereotype_stderr,none": 0.05188393075201662
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 12.871052631578948,
						"likelihood_diff_stderr,none": 0.9563880520395072,
						"pct_stereotype,none": 0.45263157894736844,
						"pct_stereotype_stderr,none": 0.036206070458230474
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 17.3422778771616,
						"likelihood_diff_stderr,none": 0.40703314709237437,
						"pct_stereotype,none": 0.47048300536672627,
						"pct_stereotype_stderr,none": 0.012191998897997573
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 15.633333333333333,
						"likelihood_diff_stderr,none": 1.2087333058801968,
						"pct_stereotype,none": 0.36666666666666664,
						"pct_stereotype_stderr,none": 0.05108070528032164
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 17.846153846153847,
						"likelihood_diff_stderr,none": 4.1166425108491245,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 25.045454545454547,
						"likelihood_diff_stderr,none": 2.878862101852634,
						"pct_stereotype,none": 0.42424242424242425,
						"pct_stereotype_stderr,none": 0.06130137276858363
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 13.401869158878505,
						"likelihood_diff_stderr,none": 0.7877392263753842,
						"pct_stereotype,none": 0.5046728971962616,
						"pct_stereotype_stderr,none": 0.02794962902436013
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 24.217391304347824,
						"likelihood_diff_stderr,none": 1.0917867455956822,
						"pct_stereotype,none": 0.2964426877470356,
						"pct_stereotype_stderr,none": 0.02876867375801391
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 20.680555555555557,
						"likelihood_diff_stderr,none": 2.341353285371633,
						"pct_stereotype,none": 0.4583333333333333,
						"pct_stereotype_stderr,none": 0.05913268547421811
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 14.410869565217391,
						"likelihood_diff_stderr,none": 0.6258057064995965,
						"pct_stereotype,none": 0.5978260869565217,
						"pct_stereotype_stderr,none": 0.02288695610426314
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 14.947826086956521,
						"likelihood_diff_stderr,none": 1.684172449242904,
						"pct_stereotype,none": 0.3826086956521739,
						"pct_stereotype_stderr,none": 0.04552031372871532
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 25.24175824175824,
						"likelihood_diff_stderr,none": 2.1006040563486477,
						"pct_stereotype,none": 0.7472527472527473,
						"pct_stereotype_stderr,none": 0.0458095185373289
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 16.46938775510204,
						"likelihood_diff_stderr,none": 1.2488908352329682,
						"pct_stereotype,none": 0.336734693877551,
						"pct_stereotype_stderr,none": 0.03384311010566736
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,flexible-extract": 0.027293404094010616,
						"exact_match,strict-match": 0.0,
						"exact_match_stderr,flexible-extract": 0.004488095380209756,
						"exact_match_stderr,strict-match": 0.0
					},
					"hellaswag": {
						"acc,none": 0.3318064130651265,
						"acc_norm,none": 0.3800039832702649,
						"acc_norm_stderr,none": 0.004843954338451429,
						"acc_stderr,none": 0.004698995789478824,
						"alias": "hellaswag"
					},
					"kobest": {
						"acc,none": 0.47138785354089013,
						"acc_stderr,none": 0.007358232995010896,
						"alias": "kobest",
						"f1,none": 0.37083123331466056,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701182,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.015801065586651758,
						"alias": " - kobest_copa",
						"f1,none": 0.4751602564102564,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.332,
						"acc_norm,none": 0.398,
						"acc_norm_stderr,none": 0.02191237788577998,
						"acc_stderr,none": 0.021081766571222856,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.33019744488802005,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.473551637279597,
						"acc_stderr,none": 0.025090768761517872,
						"alias": " - kobest_sentineg",
						"f1,none": 0.42441017529985503,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada_multilingual": {
						"acc,none": 0.10188239860275568,
						"acc_stderr,none": 0.0018602710248577913,
						"alias": "lambada_multilingual",
						"perplexity,none": 2874198.6132353013,
						"perplexity_stderr,none": 231915.3091997584
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.0780128080729672,
						"acc_stderr,none": 0.003736435348933387,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 1953927.8238455623,
						"perplexity_stderr,none": 300933.5478307018
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.19774888414515815,
						"acc_stderr,none": 0.005549121813818655,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 17229.465604028926,
						"perplexity_stderr,none": 2041.1312928659697
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.06423442654764215,
						"acc_stderr,none": 0.0034156985099531695,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 5880634.755255724,
						"perplexity_stderr,none": 847112.1320966688
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.08771589365418203,
						"acc_stderr,none": 0.003941089280335013,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 1736770.4121137708,
						"perplexity_stderr,none": 240377.89097617278
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.08169998059382884,
						"acc_stderr,none": 0.0038160613334053705,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 4782430.609357419,
						"perplexity_stderr,none": 691861.3334968527
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.294529262086514,
						"exact_match_stderr,get-answer": 0.011500471190116962
					},
					"logiqa": {
						"acc,none": 0.22427035330261136,
						"acc_norm,none": 0.2565284178187404,
						"acc_norm_stderr,none": 0.017129443327887562,
						"acc_stderr,none": 0.016360043348265508,
						"alias": "logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2455470737913486,
						"acc_norm,none": 0.26145038167938933,
						"acc_norm_stderr,none": 0.0110865491471325,
						"acc_stderr,none": 0.010859138259206548,
						"alias": "logiqa2"
					},
					"mc_taco": {
						"acc,none": 0.6145943656005084,
						"acc_stderr,none": 0.005008922569149682,
						"alias": "mc_taco",
						"f1,none": 0.26081657525898844,
						"f1_stderr,none": 0.008249714537749546
					},
					"medmcqa": {
						"acc,none": 0.2801816877838872,
						"acc_norm,none": 0.2801816877838872,
						"acc_norm_stderr,none": 0.006944473176787528,
						"acc_stderr,none": 0.006944473176787528,
						"alias": " - medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27651217596229377,
						"acc_norm,none": 0.27651217596229377,
						"acc_norm_stderr,none": 0.012540913938428879,
						"acc_stderr,none": 0.012540913938428879,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25117504628970233,
						"acc_stderr,none": 0.0036561249199233894,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653695,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.035914440841969694,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19736842105263158,
						"acc_stderr,none": 0.03238981601699397,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3018867924528302,
						"acc_stderr,none": 0.02825420034443867,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909284,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.23121387283236994,
						"acc_stderr,none": 0.032147373020294696,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.28431372549019607,
						"acc_stderr,none": 0.04488482852329017,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23404255319148937,
						"acc_stderr,none": 0.027678452578212397,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518751,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.20899470899470898,
						"acc_stderr,none": 0.020940481565334842,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04040610178208841,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.15,
						"acc_stderr,none": 0.03588702812826371,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24516129032258063,
						"acc_stderr,none": 0.024472243840895525,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.1477832512315271,
						"acc_stderr,none": 0.024969621333521274,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.20606060606060606,
						"acc_stderr,none": 0.0315841532404771,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.26262626262626265,
						"acc_stderr,none": 0.031353050095330855,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.22797927461139897,
						"acc_stderr,none": 0.030276909945178253,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24871794871794872,
						"acc_stderr,none": 0.021916957709213803,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.02592887613276612,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2773109243697479,
						"acc_stderr,none": 0.02907937453948001,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2847682119205298,
						"acc_stderr,none": 0.03684881521389023,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24954128440366974,
						"acc_stderr,none": 0.01855389762950162,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.029886910547626978,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.31862745098039214,
						"acc_stderr,none": 0.0327028718148208,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.24050632911392406,
						"acc_stderr,none": 0.02782078198114968,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.24663677130044842,
						"acc_stderr,none": 0.028930413120910874,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2824427480916031,
						"acc_stderr,none": 0.03948406125768361,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25334750265674816,
						"acc_stderr,none": 0.006332763902318966,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.24793388429752067,
						"acc_stderr,none": 0.039418975265163025,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052191,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2392638036809816,
						"acc_stderr,none": 0.03351953879521269,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.26785714285714285,
						"acc_stderr,none": 0.04203277291467762,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.2621359223300971,
						"acc_stderr,none": 0.04354631077260595,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.29914529914529914,
						"acc_stderr,none": 0.02999695185834947,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.22860791826309068,
						"acc_stderr,none": 0.015016884698539882,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2630057803468208,
						"acc_stderr,none": 0.02370309952525817,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23575418994413408,
						"acc_stderr,none": 0.014196375686290804,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.32679738562091504,
						"acc_stderr,none": 0.026857294663281416,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2587705181847441,
						"acc_stderr,none": 0.007837914850158799,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.18971061093247588,
						"acc_stderr,none": 0.02226819625878321,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.20987654320987653,
						"acc_stderr,none": 0.022658344085981375,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2757496740547588,
						"acc_stderr,none": 0.011413813609160986,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.02518778666022728,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2630718954248366,
						"acc_stderr,none": 0.017812676542320657,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2636363636363636,
						"acc_stderr,none": 0.04220224692971987,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2571428571428571,
						"acc_stderr,none": 0.027979823538744546,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.254793630159246,
						"acc_stderr,none": 0.007865411918359159,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22885572139303484,
						"acc_stderr,none": 0.029705284056772422,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2369172216936251,
						"acc_stderr,none": 0.007566553902040756,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.25301204819277107,
						"acc_stderr,none": 0.03384429155233135,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.034886477134579215,
						"alias": "  - world_religions"
					},
					"mrpc": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.023048336668420193,
						"alias": "mrpc",
						"f1,none": 0.8122270742358079,
						"f1_stderr,none": 0.016275484057001473
					},
					"multimedqa": {
						"acc,none": 0.29694819020581975,
						"acc_stderr,none": 0.005375895101184015,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5338283828382838,
						"acc_stderr,none": 0.007165347123809808,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6141835966892386,
						"mrr_stderr,none": 0.010229143693169778,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.43792325056433407,
						"r@2_stderr,none": 0.016677278334075053
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6003574115876584,
						"mrr_stderr,none": 0.010269519002757275,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.4717832957110609,
						"r@2_stderr,none": 0.01678053141516135
					},
					"openbookqa": {
						"acc,none": 0.182,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.020395095484936614,
						"acc_stderr,none": 0.017272773297730446,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.01118233080628221,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5035,
						"acc_stderr,none": 0.011182862030875625,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.489,
						"acc_stderr,none": 0.011180429374603772,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5585,
						"acc_stderr,none": 0.011106329288974696,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.011128198119942876,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5152857142857142,
						"acc_stderr,none": 0.004215109592948072,
						"alias": "pawsx"
					},
					"prost": {
						"acc,none": 0.2410333048676345,
						"acc_norm,none": 0.2978223740392827,
						"acc_norm_stderr,none": 0.0033409909976823277,
						"acc_stderr,none": 0.003124805845581139,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302904,
						"alias": "pubmedqa"
					},
					"qa4mre": {
						"acc,none": 0.2624113475177305,
						"acc_norm,none": 0.3067375886524823,
						"acc_norm_stderr,none": 0.01943996038210285,
						"acc_stderr,none": 0.018556031037772654,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.275,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.043723731609760286,
						"acc_stderr,none": 0.04093189670742399,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.03675892481369823,
						"acc_stderr,none": 0.03343758265727745,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.2746478873239437,
						"acc_norm,none": 0.2852112676056338,
						"acc_norm_stderr,none": 0.02683978116774118,
						"acc_stderr,none": 0.026531961212249936,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.499725425590335,
						"acc_stderr,none": 0.006765409531672773,
						"alias": "qnli"
					},
					"race": {
						"acc,none": 0.33779904306220093,
						"acc_stderr,none": 0.014637734314782857,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5270758122743683,
						"acc_stderr,none": 0.030052303463143706,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.694,
						"acc_norm,none": 0.658,
						"acc_norm_stderr,none": 0.015008706182121734,
						"acc_stderr,none": 0.014580006055436962,
						"alias": "sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5270758122743683,
						"acc_stderr,none": 0.030052303463143706,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5091743119266054,
						"acc_stderr,none": 0.016939001525351532,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.3254523642907128,
						"acc_norm,none": 0.389333200039988,
						"acc_norm_stderr,none": 0.0034474152348358844,
						"acc_stderr,none": 0.00331268964215577,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6015107650327776,
						"acc_stderr,none": 0.0027951016290707546,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5653044871794872,
						"acc_stderr,none": 0.004961388771194796,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.7010236140670923,
						"acc_stderr,none": 0.004609079637096439,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5406862745098039,
						"acc_stderr,none": 0.00493456175385121,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"wic": {
						"acc,none": 0.5047021943573667,
						"acc_stderr,none": 0.01980984521925977,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 1.4164979015899082,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.669367430043699,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 190.63280671822247,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.4846093133385951,
						"acc_stderr,none": 0.014045826789783673,
						"alias": "winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.04834688952654018,
						"alias": "wsc"
					},
					"wsc273": {
						"acc,none": 0.5091575091575091,
						"acc_stderr,none": 0.03031186794526186,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5174545454545455,
						"acc_stderr,none": 0.006742013363910066,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.022378596989230774,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843414,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_zh"
					},
					"xstorycloze": {
						"acc,none": 0.4956380482522111,
						"acc_stderr,none": 0.0038728188326428896,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.48908007941760423,
						"acc_stderr,none": 0.012864056278255034,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.5771012574454004,
						"acc_stderr,none": 0.012713225009126207,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.49702183984116477,
						"acc_stderr,none": 0.012866897066011239,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4930509596293845,
						"acc_stderr,none": 0.01286588257096072,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.4804765056254136,
						"acc_stderr,none": 0.012857312531836857,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4804765056254136,
						"acc_stderr,none": 0.01285731253183685,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.46459298477829253,
						"acc_stderr,none": 0.01283482285286005,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.48444738583719393,
						"acc_stderr,none": 0.012860899111470791,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.46790205162144277,
						"acc_stderr,none": 0.012840584503982027,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5115817339510258,
						"acc_stderr,none": 0.012863672949335873,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5062872270019855,
						"acc_stderr,none": 0.012866108021218216,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.5185434929197572,
						"acc_stderr,none": 0.007490970617248932,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.5311827956989247,
						"acc_stderr,none": 0.010351557734314752,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.05521175536091375,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.4921793534932221,
						"acc_stderr,none": 0.016152290551844556,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.4828897338403042,
						"acc_stderr,none": 0.030872011014694032,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5142857142857142,
						"acc_stderr,none": 0.0282051130549725,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5337301587301587,
						"acc_stderr,none": 0.022243111668199027,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "google/gemma-7b-it"
	},
	"huggyllama/llama-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"lambada_multilingual": {
						"acc,none": 0.4961769842810014,
						"acc_stderr,none": 0.07008621640376027,
						"alias": "lambada_multilingual",
						"perplexity,none": 34.70618327212021,
						"perplexity_stderr,none": 10.13400692835123
					},
					"pawsx": {
						"acc,none": 0.44671428571428573,
						"acc_stderr,none": 0.05195047972137848,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5418181818181819,
						"acc_stderr,none": 0.040717736708881784,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.38867469879518074,
						"acc_stderr,none": 0.05695391505524146,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5594729558991636,
						"acc_stderr,none": 0.0737928797468809,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7597212856821758,
						"acc_stderr,none": 0.07621979232315931,
						"alias": "xwinograd"
					}
				},
				"results": {
					"lambada_multilingual": {
						"acc,none": 0.4961769842810014,
						"acc_stderr,none": 0.07008621640376027,
						"alias": "lambada_multilingual",
						"perplexity,none": 34.70618327212021,
						"perplexity_stderr,none": 10.13400692835123
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3842421890161071,
						"acc_stderr,none": 0.006776720307079438,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 52.1774210359345,
						"perplexity_stderr,none": 3.066701318008749
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7308364059771008,
						"acc_stderr,none": 0.006179172491966779,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.500850622734743,
						"perplexity_stderr,none": 0.06865018892342145
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.42751795070832527,
						"acc_stderr,none": 0.0068923954478686475,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 44.71228931610228,
						"perplexity_stderr,none": 2.3667778615446906
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4818552299631283,
						"acc_stderr,none": 0.006961389291072816,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 30.92265081458904,
						"perplexity_stderr,none": 1.6736871487566927
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.45643314574034544,
						"acc_stderr,none": 0.0069394834360396295,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 42.21770457124047,
						"perplexity_stderr,none": 2.4716268498273224
					},
					"paws_de": {
						"acc,none": 0.4075,
						"acc_stderr,none": 0.010990098549743105,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3345,
						"acc_stderr,none": 0.010552751076266157,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.010947964603728239,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.517,
						"acc_stderr,none": 0.01117667029931067,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.441,
						"acc_stderr,none": 0.011105006104468736,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5085,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5205,
						"acc_stderr,none": 0.011173732641806813,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.44671428571428573,
						"acc_stderr,none": 0.05195047972137848,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5418181818181819,
						"acc_stderr,none": 0.040717736708881784,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.02140758204791645,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.02229623834840706,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.38867469879518074,
						"acc_stderr,none": 0.05695391505524146,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.009493454925438252,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.38032128514056224,
						"acc_stderr,none": 0.009730746464767608,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.00999585228282238,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201524,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5389558232931727,
						"acc_stderr,none": 0.009991608448389058,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.43253012048192774,
						"acc_stderr,none": 0.009930409027139452,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4779116465863454,
						"acc_stderr,none": 0.010012288645591786,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138152,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4389558232931727,
						"acc_stderr,none": 0.009947100105978365,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177119,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3192771084337349,
						"acc_stderr,none": 0.009344511873557408,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206103,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778591,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.37710843373493974,
						"acc_stderr,none": 0.009714644211180594,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301817,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5594729558991636,
						"acc_stderr,none": 0.0737928797468809,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.48444738583719393,
						"acc_stderr,none": 0.012860899111470791,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7657180675049636,
						"acc_stderr,none": 0.010899720775371961,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6578424884182661,
						"acc_stderr,none": 0.012209152707472842,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4990072799470549,
						"acc_stderr,none": 0.012867099955422921,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5254798146922568,
						"acc_stderr,none": 0.012850407240776846,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5268034414295168,
						"acc_stderr,none": 0.012848623899505768,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48378557246856385,
						"acc_stderr,none": 0.012860357805055867,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6221045665122436,
						"acc_stderr,none": 0.01247754207299466,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5076108537392455,
						"acc_stderr,none": 0.012865634571114485,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5314361350099271,
						"acc_stderr,none": 0.012841668760976905,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7597212856821758,
						"acc_stderr,none": 0.07621979232315931,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907554,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5922836287799792,
						"acc_stderr,none": 0.015876734592302294,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7300380228136882,
						"acc_stderr,none": 0.027426689796728774,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6317460317460317,
						"acc_stderr,none": 0.027219500732466696,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.628968253968254,
						"acc_stderr,none": 0.02153951426767635,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "huggyllama/llama-7b"
	},
	"m8than/Finch-14B-Continued": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6679819616685456,
						"acc_norm,none": 0.6857384441939121,
						"acc_norm_stderr,none": 0.09203102928251587,
						"acc_stderr,none": 0.1000866236459699,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4815625,
						"acc_stderr,none": 0.03504041277657958,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.7605,
						"acc_stderr,none": 0.16329646741215498,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8362985074626865,
						"acc_stderr,none": 0.14541278238199548,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.4658246656760773,
						"acc_norm,none": 0.4658246656760773,
						"acc_norm_stderr,none": 0.1655941019109346,
						"acc_stderr,none": 0.1655941019109346,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.47798307718874117,
						"acc_norm,none": 0.47798307718874117,
						"acc_norm_stderr,none": 0.10876334394374008,
						"acc_stderr,none": 0.10876334394374008,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5013230471079306,
						"likelihood_diff_stderr,none": 0.5199985292745966,
						"pct_stereotype,none": 0.6435599284436494,
						"pct_stereotype_stderr,none": 0.06382219148610796
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05364173228346457,
						"exact_match_stderr,none": 0.004999472982618882
					},
					"glue": {
						"acc,none": 0.6997230824202001,
						"acc_stderr,none": 0.002740301592465893,
						"alias": "glue",
						"f1,none": 0.7136675673474895,
						"f1_stderr,none": 0.0001276043088632763,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013136740597627497
					},
					"kmmlu": {
						"acc,none": 0.26378862258157665,
						"acc_norm,none": 0.26378862258157665,
						"acc_norm_stderr,none": 0.03059794397022058,
						"acc_stderr,none": 0.03059794397022058,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5757509318131988,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04904560733275597,
						"alias": "kobest",
						"f1,none": 0.5480481140669374,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.737531535028139,
						"acc_stderr,none": 0.017546539835902136,
						"alias": "lambada",
						"perplexity,none": 3.2667711630550964,
						"perplexity_stderr,none": 0.16111136212074317
					},
					"lambada_cloze": {
						"acc,none": 0.4359596351639822,
						"acc_stderr,none": 0.06318492691814453,
						"alias": "lambada_cloze",
						"perplexity,none": 31.34043748843276,
						"perplexity_stderr,none": 8.446478870659627
					},
					"lambada_multilingual": {
						"acc,none": 0.5715117407335533,
						"acc_stderr,none": 0.08323791808036893,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.468600759135704,
						"perplexity_stderr,none": 6.360462898503334
					},
					"mmlu": {
						"acc,none": 0.5463609172482552,
						"acc_stderr,none": 0.1295721449003838,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.4971307120085016,
						"acc_stderr,none": 0.15083506300955404,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6224654007080784,
						"acc_stderr,none": 0.09331614160744865,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6321091972700684,
						"acc_stderr,none": 0.0914384142367291,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.4611481129083413,
						"acc_stderr,none": 0.11255251016560976,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.49581263307310147,
						"acc_norm,none": 0.45648549500497193,
						"acc_norm_stderr,none": 0.00013227702108074642,
						"acc_stderr,none": 0.07139031605376193,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4318571428571429,
						"acc_stderr,none": 0.054966990474436,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.777957635680249,
						"acc_norm,none": 0.6884918020476908,
						"acc_norm_stderr,none": 0.010217932860076254,
						"acc_stderr,none": 0.14451462186074218,
						"alias": "pythia",
						"bits_per_byte,none": 0.6024995486201744,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5183449051589155,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2.969721882668387,
						"perplexity_stderr,none": 0.054046275504081004,
						"word_perplexity,none": 9.329738186439503,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.43439716312056736,
						"acc_norm_stderr,none": 0.05744400065088286,
						"acc_stderr,none": 0.04497298991866981,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.8692223220525107,
						"acc_stderr,none": 0.07213553503005979,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3613978891207816,
						"acc_stderr,none": 0.0013730746447301663,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.39167686658506734,
						"bleu_acc_stderr,none": 0.01708779588176963,
						"bleu_diff,none": -4.129732670442565,
						"bleu_diff_stderr,none": 0.8751876644851075,
						"bleu_max,none": 29.698614281423236,
						"bleu_max_stderr,none": 0.8265262334932252,
						"rouge1_acc,none": 0.3769889840881273,
						"rouge1_acc_stderr,none": 0.01696551757893035,
						"rouge1_diff,none": -5.990816050910162,
						"rouge1_diff_stderr,none": 0.9481270498020808,
						"rouge1_max,none": 55.796488975515565,
						"rouge1_max_stderr,none": 0.8350209856883201,
						"rouge2_acc,none": 0.3353733170134639,
						"rouge2_acc_stderr,none": 0.01652753403966899,
						"rouge2_diff,none": -6.8326456601135295,
						"rouge2_diff_stderr,none": 1.1465428234162014,
						"rouge2_max,none": 40.427882828940085,
						"rouge2_max_stderr,none": 1.013683962745416,
						"rougeL_acc,none": 0.3623011015911873,
						"rougeL_acc_stderr,none": 0.016826646897262258,
						"rougeL_diff,none": -6.085104490253012,
						"rougeL_diff_stderr,none": 0.9618831496331566,
						"rougeL_max,none": 52.8970220039349,
						"rougeL_max_stderr,none": 0.8537721571868255
					},
					"xcopa": {
						"acc,none": 0.6459999999999999,
						"acc_stderr,none": 0.07994088005356557,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4456760374832664,
						"acc_stderr,none": 0.049113375932080386,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6614523795198846,
						"acc_stderr,none": 0.057876366852052726,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8291750955270848,
						"acc_stderr,none": 0.03417489931662725,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6679819616685456,
						"acc_norm,none": 0.6857384441939121,
						"acc_norm_stderr,none": 0.09203102928251587,
						"acc_stderr,none": 0.1000866236459699,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4815625,
						"acc_stderr,none": 0.03504041277657958,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.545,
						"acc_stderr,none": 0.01575510149834709,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.015640320317040098,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.47583333333333333,
						"acc_stderr,none": 0.014422898235552775,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4564846416382253,
						"acc_norm,none": 0.49146757679180886,
						"acc_norm_stderr,none": 0.014609263165632182,
						"acc_stderr,none": 0.014555949760496435,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7723063973063973,
						"acc_norm,none": 0.7815656565656566,
						"acc_norm_stderr,none": 0.008478350908240555,
						"acc_stderr,none": 0.008604753300503776,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.7605,
						"acc_stderr,none": 0.16329646741215498,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.011145474902641254,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.9915,
						"acc_stderr,none": 0.002053285901060999,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.009565837790089923,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.005818283785886307,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.006136515983374211,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.7245,
						"acc_stderr,none": 0.009992487172868913,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.008826916632019004,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.5465,
						"acc_stderr,none": 0.011134669525078666,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.4725,
						"acc_stderr,none": 0.01116620871686354,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.052928416485900215,
						"acc_stderr,none": 0.004664387427691272,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8362985074626865,
						"acc_stderr,none": 0.14541278238199548,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785134,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319414,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578154,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644605,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942335,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967218,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.01552498067712258,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234197,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904612,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437603,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565743,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.0076870078762864245,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246442,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.00655881224140611,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491116,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592076,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896890894,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280308,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938605,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.01297583802196877,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.01312750285969623,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571403,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541667,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469293,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.314,
						"acc_stderr,none": 0.01468399195108796,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151103,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.01281855355784399,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008206,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.01151014697923018,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024065,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140931,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832023,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462618,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528045,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.0155944601441406,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.01546655146482935,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.014326941797231561,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270417,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.01251081614126435,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696863,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248104,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113112,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163045,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329824,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333304,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763907,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.671,
						"acc_stderr,none": 0.014865395385928367,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496114554,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444233,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.501,
						"acc_stderr,none": 0.01581926829057682,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366658,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745904,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316407,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695792,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996693,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890105,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799454,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792944,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697055,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274701,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055238,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.273,
						"acc_stderr,none": 0.014095022868717586,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7896024464831805,
						"acc_stderr,none": 0.007128811399547075,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.9464285714285714,
						"acc_stderr,none": 0.03036191711884682,
						"alias": "cb",
						"f1,none": 0.8895421177056115,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.4658246656760773,
						"acc_norm,none": 0.4658246656760773,
						"acc_norm_stderr,none": 0.1655941019109346,
						"acc_stderr,none": 0.1655941019109346,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.5510204081632653,
						"acc_norm,none": 0.5510204081632653,
						"acc_norm_stderr,none": 0.07179207795648103,
						"acc_stderr,none": 0.07179207795648103,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.10568965974008646,
						"acc_stderr,none": 0.10568965974008646,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.425531914893617,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.07289875413448858,
						"acc_stderr,none": 0.07289875413448858,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.38181818181818183,
						"acc_norm,none": 0.38181818181818183,
						"acc_norm_stderr,none": 0.06611340675536795,
						"acc_stderr,none": 0.06611340675536795,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.5675675675675675,
						"acc_norm,none": 0.5675675675675675,
						"acc_norm_stderr,none": 0.08256893144064577,
						"acc_stderr,none": 0.08256893144064577,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549508,
						"acc_stderr,none": 0.09085862440549508,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549508,
						"acc_stderr,none": 0.09085862440549508,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.7,
						"acc_norm,none": 0.7,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.8421052631578947,
						"acc_norm,none": 0.8421052631578947,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.6666666666666666,
						"acc_norm,none": 0.6666666666666666,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.8571428571428571,
						"acc_norm,none": 0.8571428571428571,
						"acc_norm_stderr,none": 0.07824607964359515,
						"acc_stderr,none": 0.07824607964359515,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.11470786693528086,
						"acc_stderr,none": 0.11470786693528086,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5833333333333334,
						"acc_norm,none": 0.5833333333333334,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674225,
						"acc_stderr,none": 0.10729033533674225,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.7142857142857143,
						"acc_norm,none": 0.7142857142857143,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.5102040816326531,
						"acc_norm,none": 0.5102040816326531,
						"acc_norm_stderr,none": 0.07215375318230074,
						"acc_stderr,none": 0.07215375318230074,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.07142857142857147,
						"acc_stderr,none": 0.07142857142857147,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.75,
						"acc_norm,none": 0.75,
						"acc_norm_stderr,none": 0.06603381797442179,
						"acc_stderr,none": 0.06603381797442179,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.5869565217391305,
						"acc_norm,none": 0.5869565217391305,
						"acc_norm_stderr,none": 0.07339975224406145,
						"acc_stderr,none": 0.07339975224406145,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.47798307718874117,
						"acc_norm,none": 0.47798307718874117,
						"acc_norm_stderr,none": 0.10876334394374008,
						"acc_stderr,none": 0.10876334394374008,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.44970414201183434,
						"acc_norm,none": 0.44970414201183434,
						"acc_norm_stderr,none": 0.038380172729489376,
						"acc_stderr,none": 0.038380172729489376,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.038610038610038595,
						"acc_stderr,none": 0.038610038610038595,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.31097560975609756,
						"acc_norm,none": 0.31097560975609756,
						"acc_norm_stderr,none": 0.03625656529444609,
						"acc_stderr,none": 0.03625656529444609,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.6375,
						"acc_norm,none": 0.6375,
						"acc_norm_stderr,none": 0.038123743406448904,
						"acc_stderr,none": 0.038123743406448904,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.507177033492823,
						"acc_norm,none": 0.507177033492823,
						"acc_norm_stderr,none": 0.03466519051738992,
						"acc_stderr,none": 0.03466519051738992,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.45625,
						"acc_norm,none": 0.45625,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.039500492593059405,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.5801526717557252,
						"acc_norm,none": 0.5801526717557252,
						"acc_norm_stderr,none": 0.04328577215262972,
						"acc_stderr,none": 0.04328577215262972,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.41911764705882354,
						"acc_norm,none": 0.41911764705882354,
						"acc_norm_stderr,none": 0.042466374059928515,
						"acc_stderr,none": 0.042466374059928515,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5981308411214953,
						"acc_norm,none": 0.5981308411214953,
						"acc_norm_stderr,none": 0.047619793135935784,
						"acc_stderr,none": 0.047619793135935784,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.6130030959752322,
						"acc_norm,none": 0.6130030959752322,
						"acc_norm_stderr,none": 0.027142956048365807,
						"acc_stderr,none": 0.027142956048365807,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.37254901960784315,
						"acc_norm,none": 0.37254901960784315,
						"acc_norm_stderr,none": 0.03393388584958404,
						"acc_stderr,none": 0.03393388584958404,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5865921787709497,
						"acc_norm,none": 0.5865921787709497,
						"acc_norm_stderr,none": 0.03691029168738377,
						"acc_stderr,none": 0.03691029168738377,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.4388185654008439,
						"acc_norm,none": 0.4388185654008439,
						"acc_norm_stderr,none": 0.032302649315470375,
						"acc_stderr,none": 0.032302649315470375,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.5981308411214953,
						"acc_norm,none": 0.5981308411214953,
						"acc_norm_stderr,none": 0.04761979313593578,
						"acc_stderr,none": 0.04761979313593578,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4339622641509434,
						"acc_norm,none": 0.4339622641509434,
						"acc_norm_stderr,none": 0.04836754297823818,
						"acc_stderr,none": 0.04836754297823818,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.37962962962962965,
						"acc_norm,none": 0.37962962962962965,
						"acc_norm_stderr,none": 0.04691521224077742,
						"acc_stderr,none": 0.04691521224077742,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.04513676718168307,
						"acc_stderr,none": 0.04513676718168307,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.44339622641509435,
						"acc_norm,none": 0.44339622641509435,
						"acc_norm_stderr,none": 0.048481318229754794,
						"acc_stderr,none": 0.048481318229754794,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.43223443223443225,
						"acc_norm,none": 0.43223443223443225,
						"acc_norm_stderr,none": 0.030037221261675184,
						"acc_stderr,none": 0.030037221261675184,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5098039215686274,
						"acc_norm,none": 0.5098039215686274,
						"acc_norm_stderr,none": 0.03508637358630572,
						"acc_stderr,none": 0.03508637358630572,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.6257309941520468,
						"acc_norm,none": 0.6257309941520468,
						"acc_norm_stderr,none": 0.03711601185389483,
						"acc_stderr,none": 0.03711601185389483,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.040955869934356876,
						"acc_stderr,none": 0.040955869934356876,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.34532374100719426,
						"acc_norm,none": 0.34532374100719426,
						"acc_norm_stderr,none": 0.04047501062151219,
						"acc_stderr,none": 0.04047501062151219,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.4528301886792453,
						"acc_norm,none": 0.4528301886792453,
						"acc_norm_stderr,none": 0.03960045781124923,
						"acc_stderr,none": 0.03960045781124923,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5521472392638037,
						"acc_norm,none": 0.5521472392638037,
						"acc_norm_stderr,none": 0.03906947479456608,
						"acc_stderr,none": 0.03906947479456608,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.436046511627907,
						"acc_norm,none": 0.436046511627907,
						"acc_norm_stderr,none": 0.03792189197270774,
						"acc_stderr,none": 0.03792189197270774,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.4246031746031746,
						"acc_norm,none": 0.4246031746031746,
						"acc_norm_stderr,none": 0.031198842986009293,
						"acc_stderr,none": 0.031198842986009293,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.03540294377095368,
						"acc_stderr,none": 0.03540294377095368,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6764705882352942,
						"acc_norm,none": 0.6764705882352942,
						"acc_norm_stderr,none": 0.030388353551886793,
						"acc_stderr,none": 0.030388353551886793,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.3217391304347826,
						"acc_norm,none": 0.3217391304347826,
						"acc_norm_stderr,none": 0.03086971229277426,
						"acc_stderr,none": 0.03086971229277426,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.45925925925925926,
						"acc_norm,none": 0.45925925925925926,
						"acc_norm_stderr,none": 0.04304979692464242,
						"acc_stderr,none": 0.04304979692464242,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4825174825174825,
						"acc_norm,none": 0.4825174825174825,
						"acc_norm_stderr,none": 0.041933411464602666,
						"acc_stderr,none": 0.041933411464602666,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.4659090909090909,
						"acc_norm,none": 0.4659090909090909,
						"acc_norm_stderr,none": 0.037708491648233415,
						"acc_stderr,none": 0.037708491648233415,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.5436241610738255,
						"acc_norm,none": 0.5436241610738255,
						"acc_norm_stderr,none": 0.0409430168096717,
						"acc_stderr,none": 0.0409430168096717,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.4556213017751479,
						"acc_norm,none": 0.4556213017751479,
						"acc_norm_stderr,none": 0.038423589228359284,
						"acc_stderr,none": 0.038423589228359284,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.04015266082801938,
						"acc_stderr,none": 0.04015266082801938,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.5338983050847458,
						"acc_norm,none": 0.5338983050847458,
						"acc_norm_stderr,none": 0.046118660119488855,
						"acc_stderr,none": 0.046118660119488855,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.31097560975609756,
						"acc_norm,none": 0.31097560975609756,
						"acc_norm_stderr,none": 0.03625656529444609,
						"acc_stderr,none": 0.03625656529444609,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.33636363636363636,
						"acc_norm,none": 0.33636363636363636,
						"acc_norm_stderr,none": 0.04525393596302506,
						"acc_stderr,none": 0.04525393596302506,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.5664335664335665,
						"acc_norm,none": 0.5664335664335665,
						"acc_norm_stderr,none": 0.04158705287172622,
						"acc_stderr,none": 0.04158705287172622,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.49206349206349204,
						"acc_norm,none": 0.49206349206349204,
						"acc_norm_stderr,none": 0.044715725362943486,
						"acc_stderr,none": 0.044715725362943486,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3945945945945946,
						"acc_norm,none": 0.3945945945945946,
						"acc_norm_stderr,none": 0.0360321188626959,
						"acc_stderr,none": 0.0360321188626959,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5174418604651163,
						"acc_norm,none": 0.5174418604651163,
						"acc_norm_stderr,none": 0.03821268439351743,
						"acc_stderr,none": 0.03821268439351743,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.48175182481751827,
						"acc_norm,none": 0.48175182481751827,
						"acc_norm_stderr,none": 0.024676788941131345,
						"acc_stderr,none": 0.024676788941131345,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.794392523364486,
						"acc_norm,none": 0.794392523364486,
						"acc_norm_stderr,none": 0.027691547344010744,
						"acc_stderr,none": 0.027691547344010744,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4878048780487805,
						"acc_norm,none": 0.4878048780487805,
						"acc_norm_stderr,none": 0.045254406451566295,
						"acc_stderr,none": 0.045254406451566295,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.4344262295081967,
						"acc_norm,none": 0.4344262295081967,
						"acc_norm_stderr,none": 0.04506194823469704,
						"acc_stderr,none": 0.04506194823469704,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5285714285714286,
						"acc_norm,none": 0.5285714285714286,
						"acc_norm_stderr,none": 0.03452921053595503,
						"acc_stderr,none": 0.03452921053595503,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5222222222222223,
						"acc_norm,none": 0.5222222222222223,
						"acc_norm_stderr,none": 0.03733482601727583,
						"acc_stderr,none": 0.03733482601727583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.6084656084656085,
						"acc_norm,none": 0.6084656084656085,
						"acc_norm_stderr,none": 0.03559787315695781,
						"acc_stderr,none": 0.03559787315695781,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.39655172413793105,
						"acc_norm,none": 0.39655172413793105,
						"acc_norm_stderr,none": 0.04561640191490673,
						"acc_stderr,none": 0.04561640191490673,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.4689655172413793,
						"acc_norm,none": 0.4689655172413793,
						"acc_norm_stderr,none": 0.04158632762097828,
						"acc_stderr,none": 0.04158632762097828,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.6190476190476191,
						"acc_norm,none": 0.6190476190476191,
						"acc_norm_stderr,none": 0.04761904761904762,
						"acc_stderr,none": 0.04761904761904762,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.5142857142857142,
						"acc_norm,none": 0.5142857142857142,
						"acc_norm_stderr,none": 0.03788942763158507,
						"acc_stderr,none": 0.03788942763158507,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.33649289099526064,
						"acc_norm,none": 0.33649289099526064,
						"acc_norm_stderr,none": 0.03260626767859446,
						"acc_stderr,none": 0.03260626767859446,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.31648936170212766,
						"acc_norm,none": 0.31648936170212766,
						"acc_norm_stderr,none": 0.024017984685453637,
						"acc_stderr,none": 0.024017984685453637,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.5431034482758621,
						"acc_norm,none": 0.5431034482758621,
						"acc_norm_stderr,none": 0.03277511546446159,
						"acc_stderr,none": 0.03277511546446159,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.5172413793103449,
						"acc_norm,none": 0.5172413793103449,
						"acc_norm_stderr,none": 0.03799168868945867,
						"acc_stderr,none": 0.03799168868945867,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.5111111111111111,
						"acc_norm,none": 0.5111111111111111,
						"acc_norm_stderr,none": 0.04318275491977976,
						"acc_stderr,none": 0.04318275491977976,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.504424778761062,
						"acc_norm,none": 0.504424778761062,
						"acc_norm_stderr,none": 0.03333202806330513,
						"acc_stderr,none": 0.03333202806330513,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.4909090909090909,
						"acc_norm,none": 0.4909090909090909,
						"acc_norm_stderr,none": 0.03903698647748441,
						"acc_stderr,none": 0.03903698647748441,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.34054054054054056,
						"acc_norm,none": 0.34054054054054056,
						"acc_norm_stderr,none": 0.03493570809271873,
						"acc_stderr,none": 0.03493570809271873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5443786982248521,
						"acc_norm,none": 0.5443786982248521,
						"acc_norm_stderr,none": 0.03842358922835929,
						"acc_stderr,none": 0.03842358922835929,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.6708074534161491,
						"acc_norm,none": 0.6708074534161491,
						"acc_norm_stderr,none": 0.03715043857896318,
						"acc_stderr,none": 0.03715043857896318,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.575,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.0392039498715957,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013136740597627497
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5013230471079306,
						"likelihood_diff_stderr,none": 0.5199985292745966,
						"pct_stereotype,none": 0.6435599284436494,
						"pct_stereotype_stderr,none": 0.06382219148610796
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.689922480620155,
						"likelihood_diff_stderr,none": 0.08627195053515001,
						"pct_stereotype,none": 0.652355396541443,
						"pct_stereotype_stderr,none": 0.01163249484177215
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.248626373626373,
						"likelihood_diff_stderr,none": 0.3992155698624475,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.047052133987784364
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.795454545454546,
						"likelihood_diff_stderr,none": 1.7814760803315288,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.069230769230769,
						"likelihood_diff_stderr,none": 0.6133409575248622,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.544921875,
						"likelihood_diff_stderr,none": 0.16200778096054566,
						"pct_stereotype,none": 0.63125,
						"pct_stereotype_stderr,none": 0.02701290980694682
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.592013888888889,
						"likelihood_diff_stderr,none": 0.24500278386851704,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.256944444444445,
						"likelihood_diff_stderr,none": 0.3492468360063434,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854288
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4557086614173227,
						"likelihood_diff_stderr,none": 0.1403786474463893,
						"pct_stereotype,none": 0.562992125984252,
						"pct_stereotype_stderr,none": 0.02202884929608508
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.730855855855856,
						"likelihood_diff_stderr,none": 0.34139713229685376,
						"pct_stereotype,none": 0.7297297297297297,
						"pct_stereotype_stderr,none": 0.04234321361084539
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.89247311827957,
						"likelihood_diff_stderr,none": 0.43312061855529127,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.338157894736842,
						"likelihood_diff_stderr,none": 0.2535269648289541,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333336
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3091457960644006,
						"likelihood_diff_stderr,none": 0.07394041923744019,
						"pct_stereotype,none": 0.6332737030411449,
						"pct_stereotype_stderr,none": 0.011771444151889984
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.091666666666667,
						"likelihood_diff_stderr,none": 0.2753799440917766,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.049968779266390734
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.375,
						"likelihood_diff_stderr,none": 0.40082047263338644,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.09469696969697,
						"likelihood_diff_stderr,none": 0.5666212056231777,
						"pct_stereotype,none": 0.7727272727272727,
						"pct_stereotype_stderr,none": 0.05197926135426052
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.8862928348909658,
						"likelihood_diff_stderr,none": 0.1437135836465973,
						"pct_stereotype,none": 0.6105919003115264,
						"pct_stereotype_stderr,none": 0.027258566978193188
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.4683794466403164,
						"likelihood_diff_stderr,none": 0.1893280466640097,
						"pct_stereotype,none": 0.45454545454545453,
						"pct_stereotype_stderr,none": 0.03136661633374339
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.532986111111111,
						"likelihood_diff_stderr,none": 0.44512548190071716,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513983
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.988858695652174,
						"likelihood_diff_stderr,none": 0.1271464106697947,
						"pct_stereotype,none": 0.5847826086956521,
						"pct_stereotype_stderr,none": 0.023000043064407873
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.348913043478261,
						"likelihood_diff_stderr,none": 0.2761603339789108,
						"pct_stereotype,none": 0.7652173913043478,
						"pct_stereotype_stderr,none": 0.039698395317531235
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.756868131868132,
						"likelihood_diff_stderr,none": 0.3209844123461135,
						"pct_stereotype,none": 0.8351648351648352,
						"pct_stereotype_stderr,none": 0.039110176747367435
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.8434311224489797,
						"likelihood_diff_stderr,none": 0.24760904409668885,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.031008683647302113
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05364173228346457,
						"exact_match_stderr,none": 0.004999472982618882
					},
					"glue": {
						"acc,none": 0.6997230824202001,
						"acc_stderr,none": 0.002740301592465893,
						"alias": "glue",
						"f1,none": 0.7136675673474895,
						"f1_stderr,none": 0.0001276043088632763,
						"mcc,none": -0.020702674026557004,
						"mcc_stderr,none": 0.013136740597627497
					},
					"hellaswag": {
						"acc,none": 0.5882294363672576,
						"acc_norm,none": 0.7897829117705636,
						"acc_norm_stderr,none": 0.004066299761478495,
						"acc_stderr,none": 0.004911481830909248,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.26378862258157665,
						"acc_norm,none": 0.26378862258157665,
						"acc_norm_stderr,none": 0.03059794397022058,
						"acc_stderr,none": 0.03059794397022058,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145162,
						"acc_stderr,none": 0.013979965645145162,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888931,
						"acc_stderr,none": 0.013718133516888931,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134708,
						"acc_stderr,none": 0.014470846741134708,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26166666666666666,
						"acc_norm,none": 0.26166666666666666,
						"acc_norm_stderr,none": 0.017959201687318422,
						"acc_stderr,none": 0.017959201687318422,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.013607356839598121,
						"acc_stderr,none": 0.013607356839598121,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.343,
						"acc_norm,none": 0.343,
						"acc_norm_stderr,none": 0.015019206922356951,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.01391220865102135,
						"acc_stderr,none": 0.01391220865102135,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.027809473820460104,
						"acc_stderr,none": 0.027809473820460104,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717591,
						"acc_stderr,none": 0.014095022868717591,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.35384615384615387,
						"acc_norm,none": 0.35384615384615387,
						"acc_norm_stderr,none": 0.04209983089826262,
						"acc_stderr,none": 0.04209983089826262,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661757,
						"acc_stderr,none": 0.013106173040661757,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986187,
						"acc_stderr,none": 0.014062601350986187,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.262,
						"acc_norm,none": 0.262,
						"acc_norm_stderr,none": 0.013912208651021355,
						"acc_stderr,none": 0.013912208651021355,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281567,
						"acc_stderr,none": 0.013354937452281567,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296184,
						"acc_stderr,none": 0.014341711358296184,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888933,
						"acc_stderr,none": 0.013718133516888933,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259736,
						"acc_stderr,none": 0.013929286594259736,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.01386041525752791,
						"acc_stderr,none": 0.01386041525752791,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440474,
						"acc_stderr,none": 0.013946271849440474,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.014632638658632902,
						"acc_stderr,none": 0.014632638658632902,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441271,
						"acc_stderr,none": 0.014399942998441271,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740668,
						"acc_stderr,none": 0.014142984975740668,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.01353152253451544,
						"acc_stderr,none": 0.01353152253451544,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.018444294148717368,
						"acc_stderr,none": 0.018444294148717368,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220072,
						"acc_stderr,none": 0.013374972519220072,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796271,
						"acc_stderr,none": 0.013996674851796271,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.01347358666196722,
						"acc_stderr,none": 0.01347358666196722,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517911,
						"acc_stderr,none": 0.014297146862517911,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.045604802157206845,
						"acc_stderr,none": 0.045604802157206845,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.02433737233777908,
						"acc_stderr,none": 0.02433737233777908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750638,
						"acc_stderr,none": 0.013626065817750638,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967225,
						"acc_stderr,none": 0.013473586661967225,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555963,
						"acc_stderr,none": 0.013550631705555963,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.02752568467055655,
						"acc_stderr,none": 0.02752568467055655,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881418,
						"acc_stderr,none": 0.013588548437881418,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481753,
						"acc_stderr,none": 0.014251810906481753,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.02808592343999731,
						"acc_stderr,none": 0.02808592343999731,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.317,
						"acc_norm,none": 0.317,
						"acc_norm_stderr,none": 0.014721675438880226,
						"acc_stderr,none": 0.014721675438880226,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5757509318131988,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.0004911823647294576,
						"acc_stderr,none": 0.04904560733275597,
						"alias": "kobest",
						"f1,none": 0.5480481140669374,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.01301499549049922,
						"alias": " - kobest_boolq",
						"f1,none": 0.567463747672516,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - kobest_copa",
						"f1,none": 0.6509822642826482,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.436,
						"acc_norm,none": 0.57,
						"acc_norm_stderr,none": 0.02216263442665284,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.43194061358391506,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5692695214105793,
						"acc_stderr,none": 0.024883655207256227,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4832185133026301,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.014060147909767737,
						"alias": " - kobest_wic",
						"f1,none": 0.5112206552947137,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.737531535028139,
						"acc_stderr,none": 0.017546539835902136,
						"alias": "lambada",
						"perplexity,none": 3.2667711630550964,
						"perplexity_stderr,none": 0.16111136212074317
					},
					"lambada_cloze": {
						"acc,none": 0.4359596351639822,
						"acc_stderr,none": 0.06318492691814453,
						"alias": "lambada_cloze",
						"perplexity,none": 31.34043748843276,
						"perplexity_stderr,none": 8.446478870659627
					},
					"lambada_multilingual": {
						"acc,none": 0.5715117407335533,
						"acc_stderr,none": 0.08323791808036893,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.468600759135704,
						"perplexity_stderr,none": 6.360462898503334
					},
					"lambada_openai": {
						"acc,none": 0.7688725014554628,
						"acc_stderr,none": 0.005873068236013241,
						"alias": " - lambada_openai",
						"perplexity,none": 2.969721882668387,
						"perplexity_stderr,none": 0.054046275504081004
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.31030467688725016,
						"acc_stderr,none": 0.006445177376219966,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 48.13299506722326,
						"perplexity_stderr,none": 1.2485002874685776
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4583737628565884,
						"acc_stderr,none": 0.006941795175625934,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 27.10729054148428,
						"perplexity_stderr,none": 1.47418700731009
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7706190568600815,
						"acc_stderr,none": 0.005857477272420429,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 2.9694009181695655,
						"perplexity_stderr,none": 0.05402848680879551
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4859305259072385,
						"acc_stderr,none": 0.006963219279097554,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 22.517961990717673,
						"perplexity_stderr,none": 1.0729083951395673
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5874248010867456,
						"acc_stderr,none": 0.00685866784180708,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 12.961617868622099,
						"perplexity_stderr,none": 0.6113402023018842
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5552105569571124,
						"acc_stderr,none": 0.00692337994818462,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 16.786732476684897,
						"perplexity_stderr,none": 0.8727922745017282
					},
					"lambada_standard": {
						"acc,none": 0.7046380749078207,
						"acc_stderr,none": 0.006355831587333139,
						"alias": " - lambada_standard",
						"perplexity,none": 3.5637147754747622,
						"perplexity_stderr,none": 0.07002007080036594
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.5616145934407142,
						"acc_stderr,none": 0.006912884634249907,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 14.547879909642258,
						"perplexity_stderr,none": 0.34459139680629813
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.3118279569892473,
						"acc_norm_stderr,none": 0.018169767037546317,
						"acc_stderr,none": 0.017162894755127073,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2862595419847328,
						"acc_norm,none": 0.31361323155216286,
						"acc_norm_stderr,none": 0.011705596450174646,
						"acc_stderr,none": 0.011404127158026004,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.3082077051926298,
						"acc_norm,none": 0.31256281407035175,
						"acc_norm_stderr,none": 0.008485662512402367,
						"acc_stderr,none": 0.008452986917013952,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.49968227070535903,
						"acc_stderr,none": 0.005145894970144046,
						"alias": "mc_taco",
						"f1,none": 0.5435748792270532,
						"f1_stderr,none": 0.005905875847083911
					},
					"medmcqa": {
						"acc,none": 0.44728663638536936,
						"acc_norm,none": 0.44728663638536936,
						"acc_norm_stderr,none": 0.007688664840171975,
						"acc_stderr,none": 0.007688664840171975,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.013999873068392923,
						"acc_stderr,none": 0.013999873068392923,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.5463609172482552,
						"acc_stderr,none": 0.1295721449003838,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5481481481481482,
						"acc_stderr,none": 0.04299268905480864,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5394736842105263,
						"acc_stderr,none": 0.04056242252249033,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6113207547169811,
						"acc_stderr,none": 0.030000485448675986,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6458333333333334,
						"acc_stderr,none": 0.03999411135753543,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5838150289017341,
						"acc_stderr,none": 0.03758517775404947,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04690650298201942,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695237,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4723404255319149,
						"acc_stderr,none": 0.03263597118409769,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.37719298245614036,
						"acc_stderr,none": 0.04559522141958215,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5172413793103449,
						"acc_stderr,none": 0.04164188720169375,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.36772486772486773,
						"acc_stderr,none": 0.02483383982556243,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.040735243221471276,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.7064516129032258,
						"acc_stderr,none": 0.02590608702131929,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.43842364532019706,
						"acc_stderr,none": 0.03491207857486519,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7272727272727273,
						"acc_stderr,none": 0.03477691162163659,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7171717171717171,
						"acc_stderr,none": 0.03208779558786751,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7409326424870466,
						"acc_stderr,none": 0.031618779179354115,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5358974358974359,
						"acc_stderr,none": 0.02528558599001784,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.0279404571362284,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.5294117647058824,
						"acc_stderr,none": 0.03242225027115007,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526733,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7431192660550459,
						"acc_stderr,none": 0.01873249292834245,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.4398148148148148,
						"acc_stderr,none": 0.0338517797604481,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.030964517926923393,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7510548523206751,
						"acc_stderr,none": 0.028146970599422644,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6502242152466368,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6793893129770993,
						"acc_stderr,none": 0.040933292298342784,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.4971307120085016,
						"acc_stderr,none": 0.15083506300955404,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6942148760330579,
						"acc_stderr,none": 0.04205953933884122,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7129629629629629,
						"acc_stderr,none": 0.043733130409147614,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.6748466257668712,
						"acc_stderr,none": 0.036803503712864616,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.44642857142857145,
						"acc_stderr,none": 0.04718471485219588,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6699029126213593,
						"acc_stderr,none": 0.0465614711001235,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7948717948717948,
						"acc_stderr,none": 0.026453508054040304,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.04512608598542129,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7318007662835249,
						"acc_stderr,none": 0.015842430835269435,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6127167630057804,
						"acc_stderr,none": 0.026226158605124655,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22681564245810057,
						"acc_stderr,none": 0.014005843570897906,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6045751633986928,
						"acc_stderr,none": 0.027996723180631445,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6224654007080784,
						"acc_stderr,none": 0.09331614160744865,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.617363344051447,
						"acc_stderr,none": 0.027604689028581993,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.6172839506172839,
						"acc_stderr,none": 0.02704453813840259,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.4397163120567376,
						"acc_stderr,none": 0.02960991207559411,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.4198174706649283,
						"acc_stderr,none": 0.01260496081608737,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.5514705882352942,
						"acc_stderr,none": 0.030211479609121593,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.545751633986928,
						"acc_stderr,none": 0.020142974553795198,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.5818181818181818,
						"acc_stderr,none": 0.0472457740573157,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.6163265306122448,
						"acc_stderr,none": 0.03113088039623593,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6321091972700684,
						"acc_stderr,none": 0.0914384142367291,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7860696517412935,
						"acc_stderr,none": 0.02899690969332893,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.4611481129083413,
						"acc_stderr,none": 0.11255251016560976,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.45180722891566266,
						"acc_stderr,none": 0.03874371556587953,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.7719298245614035,
						"acc_stderr,none": 0.032180937956023566,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7111563932755985,
						"acc_stderr,none": 0.004574998038141382,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7060618388934092,
						"acc_stderr,none": 0.004594629621210077,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7254901960784313,
						"acc_stderr,none": 0.022120630385010488,
						"alias": "mrpc",
						"f1,none": 0.8318318318318318,
						"f1_stderr,none": 0.015663790912352243
					},
					"multimedqa": {
						"acc,none": 0.49581263307310147,
						"acc_norm,none": 0.45648549500497193,
						"acc_norm_stderr,none": 0.00013227702108074642,
						"acc_stderr,none": 0.07139031605376193,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5713696369636964,
						"acc_stderr,none": 0.007108263771672479,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7294018072481887,
						"mrr_stderr,none": 0.010201528048474682,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.40632054176072235,
						"r@2_stderr,none": 0.01650968416729844
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6674191138410676,
						"mrr_stderr,none": 0.010423554947882096,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4582392776523702,
						"r@2_stderr,none": 0.01674859103843925
					},
					"openbookqa": {
						"acc,none": 0.33,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.02224224437573102,
						"acc_stderr,none": 0.02104961216613481,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4045,
						"acc_stderr,none": 0.010977254896490818,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3545,
						"acc_stderr,none": 0.010699164035359287,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.010646697895969505,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5295,
						"acc_stderr,none": 0.011163654804511657,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.011160209457602894,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.01105660998281834,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4305,
						"acc_stderr,none": 0.011074574398099852,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4318571428571429,
						"acc_stderr,none": 0.054966990474436,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7959738846572362,
						"acc_norm,none": 0.8052230685527747,
						"acc_norm_stderr,none": 0.00924000669331772,
						"acc_stderr,none": 0.009402378102942638,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2942463706233988,
						"acc_norm,none": 0.31586251067463705,
						"acc_norm_stderr,none": 0.0033962049262356198,
						"acc_stderr,none": 0.003329317923065537,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747615,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.777957635680249,
						"acc_norm,none": 0.6884918020476908,
						"acc_norm_stderr,none": 0.010217932860076254,
						"acc_stderr,none": 0.14451462186074218,
						"alias": "pythia",
						"bits_per_byte,none": 0.6024995486201744,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5183449051589155,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2.969721882668387,
						"perplexity_stderr,none": 0.054046275504081004,
						"word_perplexity,none": 9.329738186439503,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.43439716312056736,
						"acc_norm_stderr,none": 0.05744400065088286,
						"acc_stderr,none": 0.04497298991866981,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.04560517440787951,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.39436619718309857,
						"acc_norm_stderr,none": 0.029051039507650152,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5028372688998719,
						"acc_stderr,none": 0.0067653016265068885,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.7195399455849617,
						"acc_stderr,none": 0.0022341712651426753,
						"alias": "qqp",
						"f1,none": 0.7125218669979464,
						"f1_stderr,none": 0.002589378725481325
					},
					"race": {
						"acc,none": 0.35406698564593303,
						"acc_stderr,none": 0.014800834711677318,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2773,
						"em_stderr,none": 0.004476882313343509,
						"f1,none": 0.28670857165753844,
						"f1_stderr,none": 0.004485287414058083
					},
					"rte": {
						"acc,none": 0.7689530685920578,
						"acc_stderr,none": 0.02537146112218076,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.955,
						"acc_norm,none": 0.956,
						"acc_norm_stderr,none": 0.006488921798427421,
						"acc_stderr,none": 0.006558812241406122,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.7653429602888087,
						"acc_stderr,none": 0.025508815854976198,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8038990825688074,
						"acc_stderr,none": 0.013453382863192793,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5862741177646706,
						"acc_norm,none": 0.7852644206737979,
						"acc_norm_stderr,none": 0.0029032917936492935,
						"acc_stderr,none": 0.003482069446218214,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.8692223220525107,
						"acc_stderr,none": 0.07213553503005979,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.9485176282051282,
						"acc_stderr,none": 0.0022116756734903505,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9806425458599372,
						"acc_stderr,none": 0.0013871036984703581,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.6838235294117647,
						"acc_stderr,none": 0.0046042404694451615,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3613978891207816,
						"acc_stderr,none": 0.0013730746447301663,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.39167686658506734,
						"bleu_acc_stderr,none": 0.01708779588176963,
						"bleu_diff,none": -4.129732670442565,
						"bleu_diff_stderr,none": 0.8751876644851075,
						"bleu_max,none": 29.698614281423236,
						"bleu_max_stderr,none": 0.8265262334932252,
						"rouge1_acc,none": 0.3769889840881273,
						"rouge1_acc_stderr,none": 0.01696551757893035,
						"rouge1_diff,none": -5.990816050910162,
						"rouge1_diff_stderr,none": 0.9481270498020808,
						"rouge1_max,none": 55.796488975515565,
						"rouge1_max_stderr,none": 0.8350209856883201,
						"rouge2_acc,none": 0.3353733170134639,
						"rouge2_acc_stderr,none": 0.01652753403966899,
						"rouge2_diff,none": -6.8326456601135295,
						"rouge2_diff_stderr,none": 1.1465428234162014,
						"rouge2_max,none": 40.427882828940085,
						"rouge2_max_stderr,none": 1.013683962745416,
						"rougeL_acc,none": 0.3623011015911873,
						"rougeL_acc_stderr,none": 0.016826646897262258,
						"rougeL_diff,none": -6.085104490253012,
						"rougeL_diff_stderr,none": 0.9618831496331566,
						"rougeL_max,none": 52.8970220039349,
						"rougeL_max_stderr,none": 0.8537721571868255
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.39167686658506734,
						"bleu_acc_stderr,none": 0.01708779588176963,
						"bleu_diff,none": -4.129732670442565,
						"bleu_diff_stderr,none": 0.8751876644851075,
						"bleu_max,none": 29.698614281423236,
						"bleu_max_stderr,none": 0.8265262334932252,
						"rouge1_acc,none": 0.3769889840881273,
						"rouge1_acc_stderr,none": 0.01696551757893035,
						"rouge1_diff,none": -5.990816050910162,
						"rouge1_diff_stderr,none": 0.9481270498020808,
						"rouge1_max,none": 55.796488975515565,
						"rouge1_max_stderr,none": 0.8350209856883201,
						"rouge2_acc,none": 0.3353733170134639,
						"rouge2_acc_stderr,none": 0.01652753403966899,
						"rouge2_diff,none": -6.8326456601135295,
						"rouge2_diff_stderr,none": 1.1465428234162014,
						"rouge2_max,none": 40.427882828940085,
						"rouge2_max_stderr,none": 1.013683962745416,
						"rougeL_acc,none": 0.3623011015911873,
						"rougeL_acc_stderr,none": 0.016826646897262258,
						"rougeL_diff,none": -6.085104490253012,
						"rougeL_diff_stderr,none": 0.9618831496331566,
						"rougeL_max,none": 52.8970220039349,
						"rougeL_max_stderr,none": 0.8537721571868255
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2937576499388005,
						"acc_stderr,none": 0.015945068581236614,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4290381283027626,
						"acc_stderr,none": 0.014254204854931287,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05364173228346457,
						"exact_match_stderr,none": 0.004999472982618882
					},
					"wic": {
						"acc,none": 0.5501567398119123,
						"acc_stderr,none": 0.019710793664739733,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6024995486201744,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5183449051589155,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.329738186439503,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7198105761641673,
						"acc_stderr,none": 0.012621707979798499,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5673076923076923,
						"acc_stderr,none": 0.048818036870061955,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8498168498168498,
						"acc_stderr,none": 0.021661514699106647,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6459999999999999,
						"acc_stderr,none": 0.07994088005356557,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.021637197985722396,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.022331264423258383,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.018722956449139922,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.02183468586936921,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.0210496121661348,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.01911886665375976,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587568,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4456760374832664,
						"acc_stderr,none": 0.049113375932080386,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293529,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.01000387141951773,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.5008032128514056,
						"acc_stderr,none": 0.010022059935722388,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480236,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5369477911646586,
						"acc_stderr,none": 0.009994672360002297,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4991967871485944,
						"acc_stderr,none": 0.0100220599357224,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4995983935742972,
						"acc_stderr,none": 0.010022069634353847,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.45140562248995986,
						"acc_stderr,none": 0.009974628047721973,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807708,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41807228915662653,
						"acc_stderr,none": 0.009886618180256053,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3923694779116466,
						"acc_stderr,none": 0.009787120838990103,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46184738955823296,
						"acc_stderr,none": 0.009992853579749952,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.44417670682730925,
						"acc_stderr,none": 0.009959414626897997,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840175,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.35943775100401604,
						"acc_stderr,none": 0.009617895762902742,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6614523795198846,
						"acc_stderr,none": 0.057876366852052726,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6485771012574454,
						"acc_stderr,none": 0.012285910871738331,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7961614824619457,
						"acc_stderr,none": 0.010367050974022208,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7346128391793514,
						"acc_stderr,none": 0.011362678996097103,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5909993381866314,
						"acc_stderr,none": 0.012652228567132372,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6565188616810059,
						"acc_stderr,none": 0.012220432513619225,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6896095301125083,
						"acc_stderr,none": 0.01190604015249926,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5737921906022502,
						"acc_stderr,none": 0.012726223450627894,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.7174056915949703,
						"acc_stderr,none": 0.011587123627044829,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.57114493712773,
						"acc_stderr,none": 0.01273620271314777,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.628060886829914,
						"acc_stderr,none": 0.012437936235202025,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6690933156849769,
						"acc_stderr,none": 0.012108982233131473,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8291750955270848,
						"acc_stderr,none": 0.03417489931662725,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8808602150537634,
						"acc_stderr,none": 0.006719915957605397,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7643378519290928,
						"acc_stderr,none": 0.013712127574810636,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8022813688212928,
						"acc_stderr,none": 0.024605744229700223,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7047619047619048,
						"acc_stderr,none": 0.025742017645837025,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8214285714285714,
						"acc_stderr,none": 0.01707681589442905,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "m8than/Finch-14B-Continued"
	},
	"m8than/Finch-14B-Continued-10": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6648816234498309,
						"acc_norm,none": 0.6674182638105975,
						"acc_norm_stderr,none": 0.08748073260793772,
						"acc_stderr,none": 0.0974454160901496,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5459375,
						"acc_stderr,none": 0.046057318730907466,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8422089552238806,
						"acc_stderr,none": 0.13708920192147298,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.4614919702987394,
						"acc_norm,none": 0.4614919702987394,
						"acc_norm_stderr,none": 0.10426600918035533,
						"acc_stderr,none": 0.10426600918035533,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6522451167222487,
						"acc_stderr,none": 0.006846274775420319,
						"alias": "glue",
						"f1,none": 0.6456216077148048,
						"f1_stderr,none": 0.0002505570191561242,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"lambada": {
						"acc,none": 0.7308364059771008,
						"acc_stderr,none": 0.017065519206547915,
						"alias": "lambada",
						"perplexity,none": 3.277432397804061,
						"perplexity_stderr,none": 0.14540231578208046
					},
					"lambada_multilingual": {
						"acc,none": 0.570230933436833,
						"acc_stderr,none": 0.08023321842466458,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.57427443313553,
						"perplexity_stderr,none": 6.396109588907219
					},
					"mmlu": {
						"acc,none": 0.5616721264777097,
						"acc_stderr,none": 0.12922245420838252,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5094580233793836,
						"acc_stderr,none": 0.1438564975883652,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6298680399098808,
						"acc_stderr,none": 0.10072231796338442,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6603834904127397,
						"acc_stderr,none": 0.09514680794625115,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.47605455122105933,
						"acc_stderr,none": 0.11287864111088165,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.43635714285714283,
						"acc_stderr,none": 0.05805845343398072,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.784099753392691,
						"acc_norm,none": 0.670651100498023,
						"acc_norm_stderr,none": 0.00951795224839781,
						"acc_stderr,none": 0.13957995862675346,
						"alias": "pythia",
						"bits_per_byte,none": 0.6043250981838578,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5202673962133642,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.015260109353011,
						"perplexity_stderr,none": 0.0548748835029478,
						"word_perplexity,none": 9.393082187547963,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.36363435639509223,
						"acc_stderr,none": 0.0014506877344568638,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.40514075887392903,
						"bleu_acc_stderr,none": 0.017185611727753375,
						"bleu_diff,none": -3.235801013605498,
						"bleu_diff_stderr,none": 0.9008915150930162,
						"bleu_max,none": 29.833947018007752,
						"bleu_max_stderr,none": 0.8225945427012528,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.01708779588176963,
						"rouge1_diff,none": -4.628870584298668,
						"rouge1_diff_stderr,none": 1.0108822107962714,
						"rouge1_max,none": 56.180220037808176,
						"rouge1_max_stderr,none": 0.827339900885443,
						"rouge2_acc,none": 0.35128518971848227,
						"rouge2_acc_stderr,none": 0.0167113581635444,
						"rouge2_diff,none": -5.697926082201627,
						"rouge2_diff_stderr,none": 1.2038176987132096,
						"rouge2_max,none": 40.72200195164838,
						"rouge2_max_stderr,none": 1.0177720548890354,
						"rougeL_acc,none": 0.3843329253365973,
						"rougeL_acc_stderr,none": 0.017028707301245206,
						"rougeL_diff,none": -4.5739580594741875,
						"rougeL_diff_stderr,none": 1.0259473830736345,
						"rougeL_max,none": 53.16662505148674,
						"rougeL_max_stderr,none": 0.8487400784058506
					},
					"xcopa": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.07867599327948176,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4448995983935743,
						"acc_stderr,none": 0.05048861837732623,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6613320498164972,
						"acc_stderr,none": 0.05929533575377494,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8336704877500561,
						"acc_stderr,none": 0.03551148334973733,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6648816234498309,
						"acc_norm,none": 0.6674182638105975,
						"acc_norm_stderr,none": 0.08748073260793772,
						"acc_stderr,none": 0.0974454160901496,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.5459375,
						"acc_stderr,none": 0.046057318730907466,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175115,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.01581613575277321,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.014433275195211854,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4590443686006826,
						"acc_norm,none": 0.48293515358361777,
						"acc_norm_stderr,none": 0.014602878388536595,
						"acc_stderr,none": 0.01456229107360122,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7664141414141414,
						"acc_norm,none": 0.7584175084175084,
						"acc_norm_stderr,none": 0.008783247004042162,
						"acc_stderr,none": 0.008682068762796176,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8422089552238806,
						"acc_stderr,none": 0.13708920192147298,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.00927691010310331,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676768,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298406,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.01162816469672718,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024963,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085344,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154615,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968769,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397219,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.00263779414624376,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998175,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061145,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633706,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275289,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704163,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031515,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319419,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295435,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.01081165537241605,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968764,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179491,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118784,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727178,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.001000000000000014,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.353,
						"acc_stderr,none": 0.01512017260548369,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525061,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499318,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091493,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369672,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343963,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400233,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697068,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557422,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436967,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397241,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.015417317979911077,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444238,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.674,
						"acc_stderr,none": 0.01483050720454104,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696862,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673724,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653878,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366667,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318198,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042962,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729482,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612035,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389642,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578026,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171756,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.509,
						"acc_stderr,none": 0.015816736995005395,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.01377220656516854,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113471,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621219,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523736,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307815,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897877,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406731,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689096,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513064974,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.419,
						"acc_stderr,none": 0.015610338967577795,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932573,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.4614919702987394,
						"acc_norm,none": 0.4614919702987394,
						"acc_norm_stderr,none": 0.10426600918035533,
						"acc_stderr,none": 0.10426600918035533,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.41420118343195267,
						"acc_norm,none": 0.41420118343195267,
						"acc_norm_stderr,none": 0.03800364668244123,
						"acc_stderr,none": 0.03800364668244123,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.3048780487804878,
						"acc_norm,none": 0.3048780487804878,
						"acc_norm_stderr,none": 0.03605784583600454,
						"acc_stderr,none": 0.03605784583600454,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.60625,
						"acc_norm,none": 0.60625,
						"acc_norm_stderr,none": 0.03874695666685832,
						"acc_stderr,none": 0.03874695666685832,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.49282296650717705,
						"acc_norm,none": 0.49282296650717705,
						"acc_norm_stderr,none": 0.03466519051738992,
						"acc_stderr,none": 0.03466519051738992,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.425,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.0392039498715957,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.549618320610687,
						"acc_norm,none": 0.549618320610687,
						"acc_norm_stderr,none": 0.04363643698524779,
						"acc_stderr,none": 0.04363643698524779,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.40441176470588236,
						"acc_norm,none": 0.40441176470588236,
						"acc_norm_stderr,none": 0.04223943122454429,
						"acc_stderr,none": 0.04223943122454429,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5700934579439252,
						"acc_norm,none": 0.5700934579439252,
						"acc_norm_stderr,none": 0.04808472349429953,
						"acc_stderr,none": 0.04808472349429953,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.027514384324943846,
						"acc_stderr,none": 0.027514384324943846,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.36764705882352944,
						"acc_norm,none": 0.36764705882352944,
						"acc_norm_stderr,none": 0.03384132045674119,
						"acc_stderr,none": 0.03384132045674119,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5698324022346368,
						"acc_norm,none": 0.5698324022346368,
						"acc_norm_stderr,none": 0.03710927044282251,
						"acc_stderr,none": 0.03710927044282251,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.4430379746835443,
						"acc_norm,none": 0.4430379746835443,
						"acc_norm_stderr,none": 0.032335327775334835,
						"acc_stderr,none": 0.032335327775334835,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.6261682242990654,
						"acc_norm,none": 0.6261682242990654,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.41509433962264153,
						"acc_norm,none": 0.41509433962264153,
						"acc_norm_stderr,none": 0.04808633394970665,
						"acc_stderr,none": 0.04808633394970665,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.3611111111111111,
						"acc_norm,none": 0.3611111111111111,
						"acc_norm_stderr,none": 0.04643454608906274,
						"acc_stderr,none": 0.04643454608906274,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919942,
						"acc_stderr,none": 0.04336290903919942,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852894,
						"acc_stderr,none": 0.04730439022852894,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.0300060018006002,
						"acc_stderr,none": 0.0300060018006002,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5147058823529411,
						"acc_norm,none": 0.5147058823529411,
						"acc_norm_stderr,none": 0.03507793834791324,
						"acc_stderr,none": 0.03507793834791324,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.03829509868994727,
						"acc_stderr,none": 0.03829509868994727,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.5102040816326531,
						"acc_norm,none": 0.5102040816326531,
						"acc_norm_stderr,none": 0.04137167622853999,
						"acc_stderr,none": 0.04137167622853999,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.35251798561151076,
						"acc_norm,none": 0.35251798561151076,
						"acc_norm_stderr,none": 0.0406691364864082,
						"acc_stderr,none": 0.0406691364864082,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.5031446540880503,
						"acc_norm,none": 0.5031446540880503,
						"acc_norm_stderr,none": 0.03977707748639468,
						"acc_stderr,none": 0.03977707748639468,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5828220858895705,
						"acc_norm,none": 0.5828220858895705,
						"acc_norm_stderr,none": 0.03874102859818082,
						"acc_stderr,none": 0.03874102859818082,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4186046511627907,
						"acc_norm,none": 0.4186046511627907,
						"acc_norm_stderr,none": 0.037725911890875034,
						"acc_stderr,none": 0.037725911890875034,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.031236022160528714,
						"acc_stderr,none": 0.031236022160528714,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.46464646464646464,
						"acc_norm,none": 0.46464646464646464,
						"acc_norm_stderr,none": 0.035534363688280626,
						"acc_stderr,none": 0.035534363688280626,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6554621848739496,
						"acc_norm,none": 0.6554621848739496,
						"acc_norm_stderr,none": 0.03086868260412163,
						"acc_stderr,none": 0.03086868260412163,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.3391304347826087,
						"acc_norm,none": 0.3391304347826087,
						"acc_norm_stderr,none": 0.03128408938822598,
						"acc_stderr,none": 0.03128408938822598,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.4222222222222222,
						"acc_norm,none": 0.4222222222222222,
						"acc_norm_stderr,none": 0.042667634040995814,
						"acc_stderr,none": 0.042667634040995814,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4755244755244755,
						"acc_norm,none": 0.4755244755244755,
						"acc_norm_stderr,none": 0.04190876649540685,
						"acc_stderr,none": 0.04190876649540685,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.4431818181818182,
						"acc_norm,none": 0.4431818181818182,
						"acc_norm_stderr,none": 0.03755161736785979,
						"acc_stderr,none": 0.03755161736785979,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.5100671140939598,
						"acc_norm,none": 0.5100671140939598,
						"acc_norm_stderr,none": 0.04109141532737571,
						"acc_stderr,none": 0.04109141532737571,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.40828402366863903,
						"acc_norm,none": 0.40828402366863903,
						"acc_norm_stderr,none": 0.0379212984888554,
						"acc_stderr,none": 0.0379212984888554,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2803030303030303,
						"acc_norm,none": 0.2803030303030303,
						"acc_norm_stderr,none": 0.03924217639788229,
						"acc_stderr,none": 0.03924217639788229,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.5169491525423728,
						"acc_norm,none": 0.5169491525423728,
						"acc_norm_stderr,none": 0.04619845024855635,
						"acc_stderr,none": 0.04619845024855635,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.39090909090909093,
						"acc_norm,none": 0.39090909090909093,
						"acc_norm_stderr,none": 0.04673752333670237,
						"acc_stderr,none": 0.04673752333670237,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.5384615384615384,
						"acc_norm,none": 0.5384615384615384,
						"acc_norm_stderr,none": 0.0418347444773734,
						"acc_stderr,none": 0.0418347444773734,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.4523809523809524,
						"acc_norm,none": 0.4523809523809524,
						"acc_norm_stderr,none": 0.044518079590553275,
						"acc_stderr,none": 0.044518079590553275,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.372972972972973,
						"acc_norm,none": 0.372972972972973,
						"acc_norm_stderr,none": 0.03565109718452138,
						"acc_stderr,none": 0.03565109718452138,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.4941860465116279,
						"acc_norm,none": 0.4941860465116279,
						"acc_norm_stderr,none": 0.038233370649948514,
						"acc_stderr,none": 0.038233370649948514,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.46715328467153283,
						"acc_norm,none": 0.46715328467153283,
						"acc_norm_stderr,none": 0.02463989889966437,
						"acc_stderr,none": 0.02463989889966437,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.780373831775701,
						"acc_norm,none": 0.780373831775701,
						"acc_norm_stderr,none": 0.02836635864201755,
						"acc_stderr,none": 0.02836635864201755,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4796747967479675,
						"acc_norm,none": 0.4796747967479675,
						"acc_norm_stderr,none": 0.04523045598338889,
						"acc_stderr,none": 0.04523045598338889,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.4098360655737705,
						"acc_norm,none": 0.4098360655737705,
						"acc_norm_stderr,none": 0.04470938897168401,
						"acc_stderr,none": 0.04470938897168401,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5142857142857142,
						"acc_norm,none": 0.5142857142857142,
						"acc_norm_stderr,none": 0.0345716036894725,
						"acc_stderr,none": 0.0345716036894725,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.4777777777777778,
						"acc_norm,none": 0.4777777777777778,
						"acc_norm_stderr,none": 0.03733482601727583,
						"acc_stderr,none": 0.03733482601727583,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5925925925925926,
						"acc_norm,none": 0.5925925925925926,
						"acc_norm_stderr,none": 0.035835514581251615,
						"acc_stderr,none": 0.035835514581251615,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.31896551724137934,
						"acc_norm,none": 0.31896551724137934,
						"acc_norm_stderr,none": 0.043461778915984337,
						"acc_stderr,none": 0.043461778915984337,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.04144311810878151,
						"acc_stderr,none": 0.04144311810878151,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.5619047619047619,
						"acc_norm,none": 0.5619047619047619,
						"acc_norm_stderr,none": 0.048651804501824956,
						"acc_stderr,none": 0.048651804501824956,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.5085714285714286,
						"acc_norm,none": 0.5085714285714286,
						"acc_norm_stderr,none": 0.0378993320697706,
						"acc_stderr,none": 0.0378993320697706,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.33175355450236965,
						"acc_norm,none": 0.33175355450236965,
						"acc_norm_stderr,none": 0.032491254030336765,
						"acc_stderr,none": 0.032491254030336765,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.3271276595744681,
						"acc_norm,none": 0.3271276595744681,
						"acc_norm_stderr,none": 0.02422754101792965,
						"acc_stderr,none": 0.02422754101792965,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.5086206896551724,
						"acc_norm,none": 0.5086206896551724,
						"acc_norm_stderr,none": 0.0328926947316481,
						"acc_stderr,none": 0.0328926947316481,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4942528735632184,
						"acc_norm,none": 0.4942528735632184,
						"acc_norm_stderr,none": 0.03801178479702085,
						"acc_stderr,none": 0.03801178479702085,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.42962962962962964,
						"acc_norm,none": 0.42962962962962964,
						"acc_norm_stderr,none": 0.04276349494376599,
						"acc_stderr,none": 0.04276349494376599,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.4911504424778761,
						"acc_norm,none": 0.4911504424778761,
						"acc_norm_stderr,none": 0.033328111946500955,
						"acc_stderr,none": 0.033328111946500955,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.4727272727272727,
						"acc_norm,none": 0.4727272727272727,
						"acc_norm_stderr,none": 0.03898531605579419,
						"acc_stderr,none": 0.03898531605579419,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.34054054054054056,
						"acc_norm,none": 0.34054054054054056,
						"acc_norm_stderr,none": 0.03493570809271873,
						"acc_stderr,none": 0.03493570809271873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5443786982248521,
						"acc_norm,none": 0.5443786982248521,
						"acc_norm_stderr,none": 0.038423589228359284,
						"acc_stderr,none": 0.038423589228359284,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.6459627329192547,
						"acc_norm,none": 0.6459627329192547,
						"acc_norm_stderr,none": 0.03780665290318812,
						"acc_stderr,none": 0.03780665290318812,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.55625,
						"acc_norm,none": 0.55625,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.039400853796259426,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.033799766898963086,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6522451167222487,
						"acc_stderr,none": 0.006846274775420319,
						"alias": "glue",
						"f1,none": 0.6456216077148048,
						"f1_stderr,none": 0.0002505570191561242,
						"mcc,none": 0.0,
						"mcc_stderr,none": 0.0
					},
					"hellaswag": {
						"acc,none": 0.5891256721768572,
						"acc_norm,none": 0.7842063333997211,
						"acc_norm_stderr,none": 0.004105310748596489,
						"acc_stderr,none": 0.004909870006388839,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7308364059771008,
						"acc_stderr,none": 0.017065519206547915,
						"alias": "lambada",
						"perplexity,none": 3.277432397804061,
						"perplexity_stderr,none": 0.14540231578208046
					},
					"lambada_multilingual": {
						"acc,none": 0.570230933436833,
						"acc_stderr,none": 0.08023321842466458,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.57427443313553,
						"perplexity_stderr,none": 6.396109588907219
					},
					"lambada_openai": {
						"acc,none": 0.7628565883951096,
						"acc_stderr,none": 0.005925691738606928,
						"alias": " - lambada_openai",
						"perplexity,none": 3.015260109353011,
						"perplexity_stderr,none": 0.0548748835029478
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.46031437997283137,
						"acc_stderr,none": 0.0069440008789686735,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 27.31172906921195,
						"perplexity_stderr,none": 1.4878292833817073
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7622744032602368,
						"acc_stderr,none": 0.0059306966971974595,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.0157965175769377,
						"perplexity_stderr,none": 0.05489109740466202
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.49039394527459734,
						"acc_stderr,none": 0.006964691949428186,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 22.615944887100966,
						"perplexity_stderr,none": 1.0817049125217812
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5862604308169999,
						"acc_stderr,none": 0.006861528841487097,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 13.102482530597442,
						"perplexity_stderr,none": 0.6224812834214482
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5519115078594993,
						"acc_stderr,none": 0.00692833203679387,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 16.825419161190336,
						"perplexity_stderr,none": 0.8769978333971412
					},
					"lambada_standard": {
						"acc,none": 0.6990102852707161,
						"acc_stderr,none": 0.006390424136449911,
						"alias": " - lambada_standard",
						"perplexity,none": 3.5402376059435974,
						"perplexity_stderr,none": 0.06884414208960295
					},
					"logiqa": {
						"acc,none": 0.24423963133640553,
						"acc_norm,none": 0.3010752688172043,
						"acc_norm_stderr,none": 0.017992688742668232,
						"acc_stderr,none": 0.016851689430077556,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.5616721264777097,
						"acc_stderr,none": 0.12922245420838252,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5925925925925926,
						"acc_stderr,none": 0.04244633238353228,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5592105263157895,
						"acc_stderr,none": 0.04040311062490436,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.04975698519562427,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6113207547169811,
						"acc_stderr,none": 0.030000485448675986,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.04048439222695598,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.05016135580465919,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5780346820809249,
						"acc_stderr,none": 0.03765746693865151,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.04835503696107223,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.43829787234042555,
						"acc_stderr,none": 0.03243618636108101,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.04489539350270698,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5517241379310345,
						"acc_stderr,none": 0.041443118108781526,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.36507936507936506,
						"acc_stderr,none": 0.024796060602699958,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.36507936507936506,
						"acc_stderr,none": 0.04306241259127154,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.7129032258064516,
						"acc_stderr,none": 0.025736542745594528,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.4433497536945813,
						"acc_stderr,none": 0.03495334582162933,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.04975698519562428,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7212121212121212,
						"acc_stderr,none": 0.0350143870629678,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7323232323232324,
						"acc_stderr,none": 0.03154449888270286,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7772020725388601,
						"acc_stderr,none": 0.030031147977641545,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5743589743589743,
						"acc_stderr,none": 0.025069094387296535,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.027840811495871937,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.5756302521008403,
						"acc_stderr,none": 0.032104790510157764,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.3509933774834437,
						"acc_stderr,none": 0.03896981964257375,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7798165137614679,
						"acc_stderr,none": 0.017765978652327576,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.49537037037037035,
						"acc_stderr,none": 0.03409825519163572,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7401960784313726,
						"acc_stderr,none": 0.03077855467869326,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7468354430379747,
						"acc_stderr,none": 0.028304657943035303,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6502242152466368,
						"acc_stderr,none": 0.03200736719484503,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6717557251908397,
						"acc_stderr,none": 0.04118438565806298,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5094580233793836,
						"acc_stderr,none": 0.1438564975883652,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6942148760330579,
						"acc_stderr,none": 0.04205953933884122,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.6851851851851852,
						"acc_stderr,none": 0.04489931073591312,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.6625766871165644,
						"acc_stderr,none": 0.037149084099355745,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.4642857142857143,
						"acc_stderr,none": 0.04733667890053756,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6990291262135923,
						"acc_stderr,none": 0.04541609446503948,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8076923076923077,
						"acc_stderr,none": 0.02581923325648375,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.045126085985421296,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7484035759897829,
						"acc_stderr,none": 0.015517322365529622,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6329479768786127,
						"acc_stderr,none": 0.025950054337654085,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24022346368715083,
						"acc_stderr,none": 0.014288343803925302,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6339869281045751,
						"acc_stderr,none": 0.02758281141515962,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6298680399098808,
						"acc_stderr,none": 0.10072231796338442,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.639871382636656,
						"acc_stderr,none": 0.027264297599804015,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.6234567901234568,
						"acc_stderr,none": 0.026959344518747787,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.40425531914893614,
						"acc_stderr,none": 0.029275532159704725,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.43415906127770537,
						"acc_stderr,none": 0.01265903323706725,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.5845588235294118,
						"acc_stderr,none": 0.02993534270787776,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5702614379084967,
						"acc_stderr,none": 0.020027122784928547,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6454545454545455,
						"acc_stderr,none": 0.04582004841505415,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.6285714285714286,
						"acc_stderr,none": 0.030932858792789855,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6603834904127397,
						"acc_stderr,none": 0.09514680794625115,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.8258706467661692,
						"acc_stderr,none": 0.026814951200421606,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.47605455122105933,
						"acc_stderr,none": 0.11287864111088165,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.03775251680686371,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.03882310850890594,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.8011695906432749,
						"acc_stderr,none": 0.030611116557432528,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.801426388181355,
						"acc_stderr,none": 0.004026888084487691,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7915988608624899,
						"acc_stderr,none": 0.004096413384733941,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6887254901960784,
						"acc_stderr,none": 0.022950790715623736,
						"alias": " - mrpc",
						"f1,none": 0.8140556368960469,
						"f1_stderr,none": 0.01619265753417425
					},
					"openbookqa": {
						"acc,none": 0.338,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.022270877485360437,
						"acc_stderr,none": 0.021175665695209407,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654281,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.336,
						"acc_stderr,none": 0.010564459470410665,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.351,
						"acc_stderr,none": 0.010675039964286672,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5415,
						"acc_stderr,none": 0.011144549137930353,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.011174185930778312,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4495,
						"acc_stderr,none": 0.011125950223877365,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4405,
						"acc_stderr,none": 0.011103671499120343,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.43635714285714283,
						"acc_stderr,none": 0.05805845343398072,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.8030467899891186,
						"acc_norm,none": 0.8035908596300326,
						"acc_norm_stderr,none": 0.00926923223767992,
						"acc_stderr,none": 0.009278918898006378,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.784099753392691,
						"acc_norm,none": 0.670651100498023,
						"acc_norm_stderr,none": 0.00951795224839781,
						"acc_stderr,none": 0.13957995862675346,
						"alias": "pythia",
						"bits_per_byte,none": 0.6043250981838578,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5202673962133642,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.015260109353011,
						"perplexity_stderr,none": 0.0548748835029478,
						"word_perplexity,none": 9.393082187547963,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6018550581251546,
						"acc_stderr,none": 0.0024345576278988323,
						"alias": " - qqp",
						"f1,none": 0.6441629639454429,
						"f1_stderr,none": 0.0026231073767726413
					},
					"record": {
						"alias": "record",
						"em,none": 0.272,
						"em_stderr,none": 0.004450121386888205,
						"f1,none": 0.2822200002551079,
						"f1_stderr,none": 0.004461487034085861
					},
					"rte": {
						"acc,none": 0.7545126353790613,
						"acc_stderr,none": 0.025905578160457157,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.948,
						"acc_norm,none": 0.945,
						"acc_norm_stderr,none": 0.007212976294639235,
						"acc_stderr,none": 0.0070246242138171456,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.6869266055045872,
						"acc_stderr,none": 0.015713364044401386,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.36363435639509223,
						"acc_stderr,none": 0.0014506877344568638,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.40514075887392903,
						"bleu_acc_stderr,none": 0.017185611727753375,
						"bleu_diff,none": -3.235801013605498,
						"bleu_diff_stderr,none": 0.9008915150930162,
						"bleu_max,none": 29.833947018007752,
						"bleu_max_stderr,none": 0.8225945427012528,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.01708779588176963,
						"rouge1_diff,none": -4.628870584298668,
						"rouge1_diff_stderr,none": 1.0108822107962714,
						"rouge1_max,none": 56.180220037808176,
						"rouge1_max_stderr,none": 0.827339900885443,
						"rouge2_acc,none": 0.35128518971848227,
						"rouge2_acc_stderr,none": 0.0167113581635444,
						"rouge2_diff,none": -5.697926082201627,
						"rouge2_diff_stderr,none": 1.2038176987132096,
						"rouge2_max,none": 40.72200195164838,
						"rouge2_max_stderr,none": 1.0177720548890354,
						"rougeL_acc,none": 0.3843329253365973,
						"rougeL_acc_stderr,none": 0.017028707301245206,
						"rougeL_diff,none": -4.5739580594741875,
						"rougeL_diff_stderr,none": 1.0259473830736345,
						"rougeL_max,none": 53.16662505148674,
						"rougeL_max_stderr,none": 0.8487400784058506
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.40514075887392903,
						"bleu_acc_stderr,none": 0.017185611727753375,
						"bleu_diff,none": -3.235801013605498,
						"bleu_diff_stderr,none": 0.9008915150930162,
						"bleu_max,none": 29.833947018007752,
						"bleu_max_stderr,none": 0.8225945427012528,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.01708779588176963,
						"rouge1_diff,none": -4.628870584298668,
						"rouge1_diff_stderr,none": 1.0108822107962714,
						"rouge1_max,none": 56.180220037808176,
						"rouge1_max_stderr,none": 0.827339900885443,
						"rouge2_acc,none": 0.35128518971848227,
						"rouge2_acc_stderr,none": 0.0167113581635444,
						"rouge2_diff,none": -5.697926082201627,
						"rouge2_diff_stderr,none": 1.2038176987132096,
						"rouge2_max,none": 40.72200195164838,
						"rouge2_max_stderr,none": 1.0177720548890354,
						"rougeL_acc,none": 0.3843329253365973,
						"rougeL_acc_stderr,none": 0.017028707301245206,
						"rougeL_diff,none": -4.5739580594741875,
						"rougeL_diff_stderr,none": 1.0259473830736345,
						"rougeL_max,none": 53.16662505148674,
						"rougeL_max_stderr,none": 0.8487400784058506
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2937576499388005,
						"acc_stderr,none": 0.015945068581236614,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4335110628513839,
						"acc_stderr,none": 0.014301717526831369,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6043250981838578,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5202673962133642,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.393082187547963,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7521704814522494,
						"acc_stderr,none": 0.012134386019865353,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5211267605633803,
						"acc_stderr,none": 0.05970805879899504,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.07867599327948176,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.021728881438701705,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385256,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.019827714859587574,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.01872295644913993,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.0219308441207285,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.021081766571222856,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.752,
						"acc_stderr,none": 0.019332342821239107,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.01992048320956607,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4448995983935743,
						"acc_stderr,none": 0.05048861837732623,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4759036144578313,
						"acc_stderr,none": 0.010010427753210668,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4963855421686747,
						"acc_stderr,none": 0.010021811000966357,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.009854078067810773,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5365461847389559,
						"acc_stderr,none": 0.009995265580368928,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.01002195648306808,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5096385542168674,
						"acc_stderr,none": 0.010020210558438302,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.44216867469879517,
						"acc_stderr,none": 0.00995481026510205,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807713,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840171,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.39598393574297186,
						"acc_stderr,none": 0.009802809888502344,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4678714859437751,
						"acc_stderr,none": 0.01000136106817308,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.43333333333333335,
						"acc_stderr,none": 0.009932588282324245,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42008032128514056,
						"acc_stderr,none": 0.009893219469115705,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3502008032128514,
						"acc_stderr,none": 0.00956171303816195,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6613320498164972,
						"acc_stderr,none": 0.05929533575377494,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6439444076770351,
						"acc_stderr,none": 0.01232238063722049,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7961614824619457,
						"acc_stderr,none": 0.010367050974022208,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7405691594970218,
						"acc_stderr,none": 0.011279897124457369,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5949702183984117,
						"acc_stderr,none": 0.012632887218751382,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6432825943084051,
						"acc_stderr,none": 0.012327487677110359,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6929185969556585,
						"acc_stderr,none": 0.011870783739438458,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5704831237590999,
						"acc_stderr,none": 0.012738639381354,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.7240238252812706,
						"acc_stderr,none": 0.011503334549850882,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.57180675049636,
						"acc_stderr,none": 0.012733742799515153,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.6161482461945731,
						"acc_stderr,none": 0.01251514539172887,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6803441429516877,
						"acc_stderr,none": 0.012000993063297277,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8336704877500561,
						"acc_stderr,none": 0.03551148334973733,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8881720430107527,
						"acc_stderr,none": 0.006537409396036432,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109368,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7674661105318039,
						"acc_stderr,none": 0.013648658797468531,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7756653992395437,
						"acc_stderr,none": 0.025771203207084706,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.707936507936508,
						"acc_stderr,none": 0.025660845825774617,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8333333333333334,
						"acc_stderr,none": 0.016616890547541164,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "m8than/Finch-14B-Continued-10"
	},
	"m8than/Finch-14B-Final": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.669391206313416,
						"acc_norm,none": 0.673055242390079,
						"acc_norm_stderr,none": 0.08572890812112886,
						"acc_stderr,none": 0.09954835203616853,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.549375,
						"acc_stderr,none": 0.049757995234602295,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.8031,
						"acc_stderr,none": 0.1341587469282346,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.844044776119403,
						"acc_stderr,none": 0.13675854244649382,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.45022288261515603,
						"acc_norm,none": 0.45022288261515603,
						"acc_norm_stderr,none": 0.16461613915036744,
						"acc_stderr,none": 0.16461613915036744,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.4650319461232948,
						"acc_norm,none": 0.4650319461232948,
						"acc_norm_stderr,none": 0.10315820056159335,
						"acc_stderr,none": 0.10315820056159335,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5687239117471674,
						"likelihood_diff_stderr,none": 0.5098347881240998,
						"pct_stereotype,none": 0.6405784138342278,
						"pct_stereotype_stderr,none": 0.06739247701417342
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.012303149606299213,
						"exact_match_stderr,none": 0.0024460482822194203
					},
					"glue": {
						"acc,none": 0.654508099094807,
						"acc_stderr,none": 0.0069565388780035795,
						"alias": "glue",
						"f1,none": 0.6461024462989778,
						"f1_stderr,none": 0.0002526292050369486,
						"mcc,none": 0.1646951294632758,
						"mcc_stderr,none": 0.032336357657722976
					},
					"kmmlu": {
						"acc,none": 0.27126768697660997,
						"acc_norm,none": 0.27126768697660997,
						"acc_norm_stderr,none": 0.029454766992594274,
						"acc_stderr,none": 0.029454766992594274,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5768471826353869,
						"acc_norm,none": 0.58,
						"acc_norm_stderr,none": 0.0004881763527054103,
						"acc_stderr,none": 0.05909407697651153,
						"alias": "kobest",
						"f1,none": 0.5359541396406512,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7378226275955754,
						"acc_stderr,none": 0.01516374973808694,
						"alias": "lambada",
						"perplexity,none": 3.2182590216973956,
						"perplexity_stderr,none": 0.13595244439971646
					},
					"lambada_cloze": {
						"acc,none": 0.43421307975936346,
						"acc_stderr,none": 0.05220703656714691,
						"alias": "lambada_cloze",
						"perplexity,none": 29.17497178553312,
						"perplexity_stderr,none": 6.26454494206546
					},
					"lambada_multilingual": {
						"acc,none": 0.5713953037065786,
						"acc_stderr,none": 0.0817126929746517,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.40384721660657,
						"perplexity_stderr,none": 6.333659640117861
					},
					"mmlu": {
						"acc,none": 0.5621706309642501,
						"acc_stderr,none": 0.13001464375283633,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5126461211477152,
						"acc_stderr,none": 0.14336172282320195,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6337302864499517,
						"acc_stderr,none": 0.10048153423166517,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6600584985375366,
						"acc_stderr,none": 0.09437634218056394,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.47002854424357754,
						"acc_stderr,none": 0.11529740884949935,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.49325762952448543,
						"acc_norm,none": 0.45377947222593956,
						"acc_norm_stderr,none": 0.00013529425302545396,
						"acc_stderr,none": 0.06287909434422387,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.43785714285714283,
						"acc_stderr,none": 0.06032802635145791,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7857064973692255,
						"acc_norm,none": 0.6759789873550113,
						"acc_norm_stderr,none": 0.00922296264668544,
						"acc_stderr,none": 0.13904160115792802,
						"alias": "pythia",
						"bits_per_byte,none": 0.6044402430736675,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5203887371777813,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2.975408349533753,
						"perplexity_stderr,none": 0.053701156407645174,
						"word_perplexity,none": 9.397091941429304,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.35815602836879434,
						"acc_norm,none": 0.40070921985815605,
						"acc_norm_stderr,none": 0.057445725297582795,
						"acc_stderr,none": 0.04074111048823162,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.865661708429004,
						"acc_stderr,none": 0.06824739752554036,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3649662990739864,
						"acc_stderr,none": 0.0013719614766716114,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.40024479804161567,
						"bleu_acc_stderr,none": 0.017151605555749138,
						"bleu_diff,none": -3.7574288715127366,
						"bleu_diff_stderr,none": 0.8900981650904477,
						"bleu_max,none": 29.434185881090134,
						"bleu_max_stderr,none": 0.8185610204170272,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.017087795881769636,
						"rouge1_diff,none": -4.933360551806529,
						"rouge1_diff_stderr,none": 0.9697129576394805,
						"rouge1_max,none": 55.8658926752527,
						"rouge1_max_stderr,none": 0.8275214809853042,
						"rouge2_acc,none": 0.33047735618115054,
						"rouge2_acc_stderr,none": 0.016466769613698303,
						"rouge2_diff,none": -6.361848550974198,
						"rouge2_diff_stderr,none": 1.1761054306532206,
						"rouge2_max,none": 40.22209187835054,
						"rouge2_max_stderr,none": 1.0131925597036415,
						"rougeL_acc,none": 0.386780905752754,
						"rougeL_acc_stderr,none": 0.017048857010515107,
						"rougeL_diff,none": -4.992825170607286,
						"rougeL_diff_stderr,none": 0.9914374728544091,
						"rougeL_max,none": 52.78305385104008,
						"rougeL_max_stderr,none": 0.8481490715097342
					},
					"xcopa": {
						"acc,none": 0.6447272727272727,
						"acc_stderr,none": 0.078655925202766,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.445140562248996,
						"acc_stderr,none": 0.05170020859669855,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.658684796341977,
						"acc_stderr,none": 0.059368537675491516,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8356934142503933,
						"acc_stderr,none": 0.03541716419371988,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.669391206313416,
						"acc_norm,none": 0.673055242390079,
						"acc_norm_stderr,none": 0.08572890812112886,
						"acc_stderr,none": 0.09954835203616853,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.549375,
						"acc_stderr,none": 0.049757995234602295,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444233,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.015810153729833437,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.5208333333333334,
						"acc_stderr,none": 0.014427234584862746,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4590443686006826,
						"acc_norm,none": 0.492320819112628,
						"acc_norm_stderr,none": 0.01460966744089257,
						"acc_stderr,none": 0.01456229107360122,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7731481481481481,
						"acc_norm,none": 0.7622053872053872,
						"acc_norm_stderr,none": 0.008735850753507992,
						"acc_stderr,none": 0.008593512587705302,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.8031,
						"acc_stderr,none": 0.1341587469282346,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.5845,
						"acc_stderr,none": 0.011022278362940806,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0014117352790976798,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.00886153278963026,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.9415,
						"acc_stderr,none": 0.005249061947211399,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.005270046175636957,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.7755,
						"acc_stderr,none": 0.00933238563877715,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.8235,
						"acc_stderr,none": 0.00852702938396813,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.6245,
						"acc_stderr,none": 0.010830906206990816,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.5395,
						"acc_stderr,none": 0.011148184426533288,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.05422993492407809,
						"acc_stderr,none": 0.004718142854713632,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.844044776119403,
						"acc_stderr,none": 0.13675854244649382,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651526,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426574,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329842,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.01165726777130442,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.785,
						"acc_stderr,none": 0.012997843819031832,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.015292149942040577,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661747,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.00982000165134571,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298198,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142672,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.0058939578161655605,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033839,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.0055683935750813415,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406725,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.00785529793869759,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029997,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.00727440148169706,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504417,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737221,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099182,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584933,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454264,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557794,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.365,
						"acc_stderr,none": 0.015231776226264912,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662739,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786564,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717602,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.010845350230472986,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796596,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996698,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571413,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406726,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348639,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.00973955126578513,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.015473313265859408,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706627,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.687,
						"acc_stderr,none": 0.01467127282297788,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696862,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804476,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151132,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523715,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099189,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666691,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099203,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381782,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881416,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477341,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081369,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578026,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.014965960710224485,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.015803979428161946,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440481,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270416,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481744,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524306,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796391,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369678,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666692,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571408,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745029963,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656798,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.015537226438634602,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.014922019523732961,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6253822629969419,
						"acc_stderr,none": 0.008465633983431928,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.9464285714285714,
						"acc_stderr,none": 0.03036191711884682,
						"alias": "cb",
						"f1,none": 0.9052631578947369,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.45022288261515603,
						"acc_norm,none": 0.45022288261515603,
						"acc_norm_stderr,none": 0.16461613915036744,
						"acc_stderr,none": 0.16461613915036744,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.5102040816326531,
						"acc_norm,none": 0.5102040816326531,
						"acc_norm_stderr,none": 0.07215375318230074,
						"acc_stderr,none": 0.07215375318230074,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.48484848484848486,
						"acc_norm,none": 0.48484848484848486,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.40425531914893614,
						"acc_norm,none": 0.40425531914893614,
						"acc_norm_stderr,none": 0.07235674844413013,
						"acc_stderr,none": 0.07235674844413013,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.32727272727272727,
						"acc_norm,none": 0.32727272727272727,
						"acc_norm_stderr,none": 0.06385244698698629,
						"acc_stderr,none": 0.06385244698698629,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.5135135135135135,
						"acc_norm,none": 0.5135135135135135,
						"acc_norm_stderr,none": 0.08330289193201319,
						"acc_stderr,none": 0.08330289193201319,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.11180339887498948,
						"acc_stderr,none": 0.11180339887498948,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.0795654132101608,
						"acc_stderr,none": 0.0795654132101608,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549507,
						"acc_stderr,none": 0.09085862440549507,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.85,
						"acc_norm,none": 0.85,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.7894736842105263,
						"acc_norm,none": 0.7894736842105263,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.6666666666666666,
						"acc_norm,none": 0.6666666666666666,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.6842105263157895,
						"acc_norm,none": 0.6842105263157895,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.8095238095238095,
						"acc_norm,none": 0.8095238095238095,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674225,
						"acc_stderr,none": 0.10729033533674225,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.7142857142857143,
						"acc_norm,none": 0.7142857142857143,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.5217391304347826,
						"acc_norm,none": 0.5217391304347826,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.4897959183673469,
						"acc_norm,none": 0.4897959183673469,
						"acc_norm_stderr,none": 0.07215375318230076,
						"acc_stderr,none": 0.07215375318230076,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.7045454545454546,
						"acc_norm,none": 0.7045454545454546,
						"acc_norm_stderr,none": 0.06957698714453994,
						"acc_stderr,none": 0.06957698714453994,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.07389883353033022,
						"acc_stderr,none": 0.07389883353033022,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.4650319461232948,
						"acc_norm,none": 0.4650319461232948,
						"acc_norm_stderr,none": 0.10315820056159335,
						"acc_stderr,none": 0.10315820056159335,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.4378698224852071,
						"acc_norm,none": 0.4378698224852071,
						"acc_norm_stderr,none": 0.03827686117539366,
						"acc_stderr,none": 0.03827686117539366,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.3048780487804878,
						"acc_norm,none": 0.3048780487804878,
						"acc_norm_stderr,none": 0.03605784583600454,
						"acc_stderr,none": 0.03605784583600454,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.6,
						"acc_norm,none": 0.6,
						"acc_norm_stderr,none": 0.038851434494290536,
						"acc_stderr,none": 0.038851434494290536,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.4688995215311005,
						"acc_norm,none": 0.4688995215311005,
						"acc_norm_stderr,none": 0.034601631258720345,
						"acc_stderr,none": 0.034601631258720345,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.03945381823835187,
						"acc_stderr,none": 0.03945381823835187,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.5419847328244275,
						"acc_norm,none": 0.5419847328244275,
						"acc_norm_stderr,none": 0.04369802690578756,
						"acc_stderr,none": 0.04369802690578756,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.4117647058823529,
						"acc_norm,none": 0.4117647058823529,
						"acc_norm_stderr,none": 0.04235778234253509,
						"acc_stderr,none": 0.04235778234253509,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5607476635514018,
						"acc_norm,none": 0.5607476635514018,
						"acc_norm_stderr,none": 0.048204529006379074,
						"acc_stderr,none": 0.048204529006379074,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.5851393188854489,
						"acc_norm,none": 0.5851393188854489,
						"acc_norm_stderr,none": 0.027456984787147014,
						"acc_stderr,none": 0.027456984787147014,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.37254901960784315,
						"acc_norm,none": 0.37254901960784315,
						"acc_norm_stderr,none": 0.03393388584958404,
						"acc_stderr,none": 0.03393388584958404,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5754189944134078,
						"acc_norm,none": 0.5754189944134078,
						"acc_norm_stderr,none": 0.03704779597999959,
						"acc_stderr,none": 0.03704779597999959,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.4472573839662447,
						"acc_norm,none": 0.4472573839662447,
						"acc_norm_stderr,none": 0.03236564251614192,
						"acc_stderr,none": 0.03236564251614192,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199628,
						"acc_stderr,none": 0.04439263906199628,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.6261682242990654,
						"acc_norm,none": 0.6261682242990654,
						"acc_norm_stderr,none": 0.04699273118994851,
						"acc_stderr,none": 0.04699273118994851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.41509433962264153,
						"acc_norm,none": 0.41509433962264153,
						"acc_norm_stderr,none": 0.04808633394970665,
						"acc_stderr,none": 0.04808633394970665,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.37037037037037035,
						"acc_norm,none": 0.37037037037037035,
						"acc_norm_stderr,none": 0.04668408033024931,
						"acc_stderr,none": 0.04668408033024931,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714283,
						"acc_stderr,none": 0.04285714285714283,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.3584905660377358,
						"acc_norm,none": 0.3584905660377358,
						"acc_norm_stderr,none": 0.04679998780012862,
						"acc_stderr,none": 0.04679998780012862,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.43956043956043955,
						"acc_norm,none": 0.43956043956043955,
						"acc_norm_stderr,none": 0.030094646016767413,
						"acc_stderr,none": 0.030094646016767413,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03509312031717982,
						"acc_stderr,none": 0.03509312031717982,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.543859649122807,
						"acc_norm,none": 0.543859649122807,
						"acc_norm_stderr,none": 0.03820042586602966,
						"acc_stderr,none": 0.03820042586602966,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.5170068027210885,
						"acc_norm,none": 0.5170068027210885,
						"acc_norm_stderr,none": 0.041356350546877384,
						"acc_stderr,none": 0.041356350546877384,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.3381294964028777,
						"acc_norm,none": 0.3381294964028777,
						"acc_norm_stderr,none": 0.04027063698740207,
						"acc_stderr,none": 0.04027063698740207,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.5031446540880503,
						"acc_norm,none": 0.5031446540880503,
						"acc_norm_stderr,none": 0.03977707748639468,
						"acc_stderr,none": 0.03977707748639468,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5766871165644172,
						"acc_norm,none": 0.5766871165644172,
						"acc_norm_stderr,none": 0.03881891213334382,
						"acc_stderr,none": 0.03881891213334382,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4186046511627907,
						"acc_norm,none": 0.4186046511627907,
						"acc_norm_stderr,none": 0.037725911890875034,
						"acc_stderr,none": 0.037725911890875034,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.4246031746031746,
						"acc_norm,none": 0.4246031746031746,
						"acc_norm_stderr,none": 0.031198842986009293,
						"acc_stderr,none": 0.031198842986009293,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.4797979797979798,
						"acc_norm,none": 0.4797979797979798,
						"acc_norm_stderr,none": 0.03559443565563918,
						"acc_stderr,none": 0.03559443565563918,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6638655462184874,
						"acc_norm,none": 0.6638655462184874,
						"acc_norm_stderr,none": 0.030684737115135363,
						"acc_stderr,none": 0.030684737115135363,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.0314735003381084,
						"acc_stderr,none": 0.0314735003381084,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.43703703703703706,
						"acc_norm,none": 0.43703703703703706,
						"acc_norm_stderr,none": 0.042849586397533994,
						"acc_stderr,none": 0.042849586397533994,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4825174825174825,
						"acc_norm,none": 0.4825174825174825,
						"acc_norm_stderr,none": 0.041933411464602666,
						"acc_stderr,none": 0.041933411464602666,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.44886363636363635,
						"acc_norm,none": 0.44886363636363635,
						"acc_norm_stderr,none": 0.03759825773425829,
						"acc_stderr,none": 0.03759825773425829,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.5100671140939598,
						"acc_norm,none": 0.5100671140939598,
						"acc_norm_stderr,none": 0.04109141532737571,
						"acc_stderr,none": 0.04109141532737571,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.4260355029585799,
						"acc_norm,none": 0.4260355029585799,
						"acc_norm_stderr,none": 0.03815142551613446,
						"acc_stderr,none": 0.03815142551613446,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.03955907664235389,
						"acc_stderr,none": 0.03955907664235389,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.5169491525423728,
						"acc_norm,none": 0.5169491525423728,
						"acc_norm_stderr,none": 0.04619845024855635,
						"acc_stderr,none": 0.04619845024855635,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2926829268292683,
						"acc_norm,none": 0.2926829268292683,
						"acc_norm_stderr,none": 0.035637888362588285,
						"acc_stderr,none": 0.035637888362588285,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.34545454545454546,
						"acc_norm,none": 0.34545454545454546,
						"acc_norm_stderr,none": 0.04554619617541054,
						"acc_stderr,none": 0.04554619617541054,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.5314685314685315,
						"acc_norm,none": 0.5314685314685315,
						"acc_norm_stderr,none": 0.04187588397445898,
						"acc_stderr,none": 0.04187588397445898,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.49206349206349204,
						"acc_norm,none": 0.49206349206349204,
						"acc_norm_stderr,none": 0.044715725362943486,
						"acc_stderr,none": 0.044715725362943486,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3945945945945946,
						"acc_norm,none": 0.3945945945945946,
						"acc_norm_stderr,none": 0.0360321188626959,
						"acc_stderr,none": 0.0360321188626959,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5116279069767442,
						"acc_norm,none": 0.5116279069767442,
						"acc_norm_stderr,none": 0.03822561461565633,
						"acc_stderr,none": 0.03822561461565633,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.44282238442822386,
						"acc_norm,none": 0.44282238442822386,
						"acc_norm_stderr,none": 0.024531250367222056,
						"acc_stderr,none": 0.024531250367222056,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7850467289719626,
						"acc_norm,none": 0.7850467289719626,
						"acc_norm_stderr,none": 0.028146861857151338,
						"acc_stderr,none": 0.028146861857151338,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.44715447154471544,
						"acc_norm,none": 0.44715447154471544,
						"acc_norm_stderr,none": 0.0450143283311066,
						"acc_stderr,none": 0.0450143283311066,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.4426229508196721,
						"acc_norm,none": 0.4426229508196721,
						"acc_norm_stderr,none": 0.04515426947106743,
						"acc_stderr,none": 0.04515426947106743,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5380952380952381,
						"acc_norm,none": 0.5380952380952381,
						"acc_norm_stderr,none": 0.034485192220162664,
						"acc_stderr,none": 0.034485192220162664,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.4888888888888889,
						"acc_norm,none": 0.4888888888888889,
						"acc_norm_stderr,none": 0.037362525904368636,
						"acc_stderr,none": 0.037362525904368636,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5873015873015873,
						"acc_norm,none": 0.5873015873015873,
						"acc_norm_stderr,none": 0.03590608560215488,
						"acc_stderr,none": 0.03590608560215488,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.0443230749598035,
						"acc_stderr,none": 0.0443230749598035,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.46206896551724136,
						"acc_norm,none": 0.46206896551724136,
						"acc_norm_stderr,none": 0.041546596717075474,
						"acc_stderr,none": 0.041546596717075474,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.5238095238095238,
						"acc_norm,none": 0.5238095238095238,
						"acc_norm_stderr,none": 0.04897341376234782,
						"acc_stderr,none": 0.04897341376234782,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.4857142857142857,
						"acc_norm,none": 0.4857142857142857,
						"acc_norm_stderr,none": 0.03788942763158507,
						"acc_stderr,none": 0.03788942763158507,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.35545023696682465,
						"acc_norm,none": 0.35545023696682465,
						"acc_norm_stderr,none": 0.033029955091808956,
						"acc_stderr,none": 0.033029955091808956,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.324468085106383,
						"acc_norm,none": 0.324468085106383,
						"acc_norm_stderr,none": 0.024176492541518102,
						"acc_stderr,none": 0.024176492541518102,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03289758474798845,
						"acc_stderr,none": 0.03289758474798845,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.5114942528735632,
						"acc_norm,none": 0.5114942528735632,
						"acc_norm_stderr,none": 0.03800425000198233,
						"acc_stderr,none": 0.03800425000198233,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.43703703703703706,
						"acc_norm,none": 0.43703703703703706,
						"acc_norm_stderr,none": 0.04284958639753399,
						"acc_stderr,none": 0.04284958639753399,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.5176991150442478,
						"acc_norm,none": 0.5176991150442478,
						"acc_norm_stderr,none": 0.03331244287560829,
						"acc_stderr,none": 0.03331244287560829,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.46060606060606063,
						"acc_norm,none": 0.46060606060606063,
						"acc_norm_stderr,none": 0.03892207016552013,
						"acc_stderr,none": 0.03892207016552013,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3621621621621622,
						"acc_norm,none": 0.3621621621621622,
						"acc_norm_stderr,none": 0.03543217115138485,
						"acc_stderr,none": 0.03543217115138485,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5502958579881657,
						"acc_norm,none": 0.5502958579881657,
						"acc_norm_stderr,none": 0.03838017272948938,
						"acc_stderr,none": 0.03838017272948938,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.6521739130434783,
						"acc_norm,none": 0.6521739130434783,
						"acc_norm_stderr,none": 0.03765327842541042,
						"acc_stderr,none": 0.03765327842541042,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.5625,
						"acc_norm,none": 0.5625,
						"acc_norm_stderr,none": 0.0393415738622931,
						"acc_stderr,none": 0.0393415738622931,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1646951294632758,
						"mcc_stderr,none": 0.032336357657722976
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0348735088019777,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5687239117471674,
						"likelihood_diff_stderr,none": 0.5098347881240998,
						"pct_stereotype,none": 0.6405784138342278,
						"pct_stereotype_stderr,none": 0.06739247701417342
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.7883124627310676,
						"likelihood_diff_stderr,none": 0.0872409379288738,
						"pct_stereotype,none": 0.6636851520572451,
						"pct_stereotype_stderr,none": 0.011540299085418102
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.269230769230769,
						"likelihood_diff_stderr,none": 0.41150615223553827,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355002
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.4772727272727275,
						"likelihood_diff_stderr,none": 1.8248457423768927,
						"pct_stereotype,none": 0.9090909090909091,
						"pct_stereotype_stderr,none": 0.0909090909090909
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.998076923076923,
						"likelihood_diff_stderr,none": 0.6023696963738029,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.633203125,
						"likelihood_diff_stderr,none": 0.15571107189102557,
						"pct_stereotype,none": 0.628125,
						"pct_stereotype_stderr,none": 0.02705990013900488
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.580439814814815,
						"likelihood_diff_stderr,none": 0.2516977445096573,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.272569444444445,
						"likelihood_diff_stderr,none": 0.342302725720032,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854288
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.6636318897637796,
						"likelihood_diff_stderr,none": 0.1497048381160624,
						"pct_stereotype,none": 0.5688976377952756,
						"pct_stereotype_stderr,none": 0.021993952705996092
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.8975225225225225,
						"likelihood_diff_stderr,none": 0.3431282467977641,
						"pct_stereotype,none": 0.7477477477477478,
						"pct_stereotype_stderr,none": 0.04140938118194942
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.899193548387097,
						"likelihood_diff_stderr,none": 0.43507542140841865,
						"pct_stereotype,none": 0.9247311827956989,
						"pct_stereotype_stderr,none": 0.027505616493839195
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.426315789473684,
						"likelihood_diff_stderr,none": 0.25263335566301814,
						"pct_stereotype,none": 0.7263157894736842,
						"pct_stereotype_stderr,none": 0.03243072906189839
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.3464147286821704,
						"likelihood_diff_stderr,none": 0.0761532609450768,
						"pct_stereotype,none": 0.6171735241502684,
						"pct_stereotype_stderr,none": 0.011873195510133001
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.125,
						"likelihood_diff_stderr,none": 0.27438075779778703,
						"pct_stereotype,none": 0.6555555555555556,
						"pct_stereotype_stderr,none": 0.050369697187736755
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.576923076923077,
						"likelihood_diff_stderr,none": 0.5692329348538154,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.083333333333333,
						"likelihood_diff_stderr,none": 0.5375832275059617,
						"pct_stereotype,none": 0.7727272727272727,
						"pct_stereotype_stderr,none": 0.05197926135426052
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0070093457943927,
						"likelihood_diff_stderr,none": 0.14845478699869097,
						"pct_stereotype,none": 0.5919003115264797,
						"pct_stereotype_stderr,none": 0.02747466632766759
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.338932806324111,
						"likelihood_diff_stderr,none": 0.18979355687703497,
						"pct_stereotype,none": 0.4782608695652174,
						"pct_stereotype_stderr,none": 0.03146725497633679
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6302083333333335,
						"likelihood_diff_stderr,none": 0.4444619714089789,
						"pct_stereotype,none": 0.7083333333333334,
						"pct_stereotype_stderr,none": 0.05394274771736147
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.0505434782608694,
						"likelihood_diff_stderr,none": 0.14012753775005438,
						"pct_stereotype,none": 0.5434782608695652,
						"pct_stereotype_stderr,none": 0.023249599562309698
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.541304347826087,
						"likelihood_diff_stderr,none": 0.3013236821593022,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.818681318681319,
						"likelihood_diff_stderr,none": 0.29901307785363185,
						"pct_stereotype,none": 0.7912087912087912,
						"pct_stereotype_stderr,none": 0.042843052065094304
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.7847576530612246,
						"likelihood_diff_stderr,none": 0.25064762373833827,
						"pct_stereotype,none": 0.7346938775510204,
						"pct_stereotype_stderr,none": 0.03161619058128502
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.012303149606299213,
						"exact_match_stderr,none": 0.0024460482822194203
					},
					"glue": {
						"acc,none": 0.654508099094807,
						"acc_stderr,none": 0.0069565388780035795,
						"alias": "glue",
						"f1,none": 0.6461024462989778,
						"f1_stderr,none": 0.0002526292050369486,
						"mcc,none": 0.1646951294632758,
						"mcc_stderr,none": 0.032336357657722976
					},
					"hellaswag": {
						"acc,none": 0.5888269269069907,
						"acc_norm,none": 0.7811192989444333,
						"acc_norm_stderr,none": 0.004126424809818348,
						"acc_stderr,none": 0.004910409150135492,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.27126768697660997,
						"acc_norm,none": 0.27126768697660997,
						"acc_norm_stderr,none": 0.029454766992594274,
						"acc_stderr,none": 0.029454766992594274,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.01429714686251791,
						"acc_stderr,none": 0.01429714686251791,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.01428212095520048,
						"acc_stderr,none": 0.01428212095520048,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.0181396916738784,
						"acc_stderr,none": 0.0181396916738784,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774166,
						"acc_stderr,none": 0.013877773329774166,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.359,
						"acc_norm,none": 0.359,
						"acc_norm_stderr,none": 0.015177264224798592,
						"acc_stderr,none": 0.015177264224798592,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702306,
						"acc_stderr,none": 0.013681600278702306,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.02808592343999728,
						"acc_stderr,none": 0.02808592343999728,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986184,
						"acc_stderr,none": 0.014062601350986184,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.36153846153846153,
						"acc_norm,none": 0.36153846153846153,
						"acc_norm_stderr,none": 0.042300915595389274,
						"acc_stderr,none": 0.042300915595389274,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.045126085985421255,
						"acc_stderr,none": 0.045126085985421255,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651152,
						"acc_stderr,none": 0.013736254390651152,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.258,
						"acc_norm,none": 0.258,
						"acc_norm_stderr,none": 0.013842963108656603,
						"acc_stderr,none": 0.013842963108656603,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.01355063170555596,
						"acc_stderr,none": 0.01355063170555596,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.014190150117612032,
						"acc_stderr,none": 0.014190150117612032,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.01379003862087284,
						"acc_stderr,none": 0.01379003862087284,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.279,
						"acc_norm,none": 0.279,
						"acc_norm_stderr,none": 0.01419015011761203,
						"acc_stderr,none": 0.01419015011761203,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809963,
						"acc_stderr,none": 0.014312087053809963,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816505,
						"acc_stderr,none": 0.04229525846816505,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.01452608023545955,
						"acc_stderr,none": 0.01452608023545955,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008213,
						"acc_stderr,none": 0.014414290540008213,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877646,
						"acc_stderr,none": 0.013663187134877646,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462621,
						"acc_stderr,none": 0.014078856992462621,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2733333333333333,
						"acc_norm,none": 0.2733333333333333,
						"acc_norm_stderr,none": 0.01820960423827394,
						"acc_stderr,none": 0.01820960423827394,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750648,
						"acc_stderr,none": 0.013626065817750648,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.01392928659425973,
						"acc_stderr,none": 0.01392928659425973,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091503,
						"acc_stderr,none": 0.014205696104091503,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950444,
						"acc_stderr,none": 0.014553205687950444,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.02481518457232592,
						"acc_stderr,none": 0.02481518457232592,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796257,
						"acc_stderr,none": 0.013996674851796257,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314142,
						"acc_stderr,none": 0.013644675781314142,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.256,
						"acc_norm,none": 0.256,
						"acc_norm_stderr,none": 0.013807775152234183,
						"acc_stderr,none": 0.013807775152234183,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.02808592343999728,
						"acc_stderr,none": 0.02808592343999728,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.244,
						"acc_norm,none": 0.244,
						"acc_norm_stderr,none": 0.013588548437881431,
						"acc_stderr,none": 0.013588548437881431,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809965,
						"acc_stderr,none": 0.014312087053809965,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.014853842487270333,
						"acc_stderr,none": 0.014853842487270333,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5768471826353869,
						"acc_norm,none": 0.58,
						"acc_norm_stderr,none": 0.0004881763527054103,
						"acc_stderr,none": 0.05909407697651153,
						"alias": "kobest",
						"f1,none": 0.5359541396406512,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.603988603988604,
						"acc_stderr,none": 0.01305687897445091,
						"alias": " - kobest_boolq",
						"f1,none": 0.5426978008459187,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.661,
						"acc_stderr,none": 0.01497675877162034,
						"alias": " - kobest_copa",
						"f1,none": 0.6598158188654707,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.45,
						"acc_norm,none": 0.58,
						"acc_norm_stderr,none": 0.02209471322976178,
						"acc_stderr,none": 0.022270877485360444,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.4466610708498513,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.6801007556675063,
						"acc_stderr,none": 0.023439354253007107,
						"alias": " - kobest_sentineg",
						"f1,none": 0.6545342423515708,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4976190476190476,
						"acc_stderr,none": 0.014091337450940527,
						"alias": " - kobest_wic",
						"f1,none": 0.4282084682614048,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7378226275955754,
						"acc_stderr,none": 0.01516374973808694,
						"alias": "lambada",
						"perplexity,none": 3.2182590216973956,
						"perplexity_stderr,none": 0.13595244439971646
					},
					"lambada_cloze": {
						"acc,none": 0.43421307975936346,
						"acc_stderr,none": 0.05220703656714691,
						"alias": "lambada_cloze",
						"perplexity,none": 29.17497178553312,
						"perplexity_stderr,none": 6.26454494206546
					},
					"lambada_multilingual": {
						"acc,none": 0.5713953037065786,
						"acc_stderr,none": 0.0817126929746517,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.40384721660657,
						"perplexity_stderr,none": 6.333659640117861
					},
					"lambada_openai": {
						"acc,none": 0.7647972055113527,
						"acc_stderr,none": 0.005908897517027224,
						"alias": " - lambada_openai",
						"perplexity,none": 2.975408349533753,
						"perplexity_stderr,none": 0.053701156407645174
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.3306811566078013,
						"acc_stderr,none": 0.006554405748731915,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 41.60429083136416,
						"perplexity_stderr,none": 1.0381890251473764
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.46031437997283137,
						"acc_stderr,none": 0.006944000878968677,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 26.904169959008744,
						"perplexity_stderr,none": 1.4630933113855904
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7665437609159713,
						"acc_stderr,none": 0.0058936357584084866,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 2.9756806189799465,
						"perplexity_stderr,none": 0.05371521143950206
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4865127110421114,
						"acc_stderr,none": 0.006963442876327699,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 22.676987008286588,
						"perplexity_stderr,none": 1.0846609611989606
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5872307393751213,
						"acc_stderr,none": 0.006859147422201025,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 12.893482885374874,
						"perplexity_stderr,none": 0.6119013677300463
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5563749272268581,
						"acc_stderr,none": 0.00692155843663848,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 16.568915611382707,
						"perplexity_stderr,none": 0.8625597122544769
					},
					"lambada_standard": {
						"acc,none": 0.710071802833301,
						"acc_stderr,none": 0.006321329576857211,
						"alias": " - lambada_standard",
						"perplexity,none": 3.4618321122850473,
						"perplexity_stderr,none": 0.06646637559853696
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.5377450029109256,
						"acc_stderr,none": 0.006946100647081567,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 16.745652739702074,
						"perplexity_stderr,none": 0.39981066363211903
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.017803862148538015,
						"acc_stderr,none": 0.017162894755127066,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.28498727735368956,
						"acc_norm,none": 0.30661577608142493,
						"acc_norm_stderr,none": 0.011633118013515005,
						"acc_stderr,none": 0.011388893410930606,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.27671691792294806,
						"acc_norm,none": 0.2790619765494137,
						"acc_norm_stderr,none": 0.008211072548538903,
						"acc_stderr,none": 0.008189786871508203,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3316034738402881,
						"acc_stderr,none": 0.004845266051691529,
						"alias": "mc_taco",
						"f1,none": 0.4876187383291386,
						"f1_stderr,none": 0.005545657243364791
					},
					"medmcqa": {
						"acc,none": 0.443939756155869,
						"acc_norm,none": 0.443939756155869,
						"acc_norm_stderr,none": 0.007683001681622904,
						"acc_stderr,none": 0.007683001681622904,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.4721131186174391,
						"acc_norm,none": 0.4721131186174391,
						"acc_norm_stderr,none": 0.01399748185593381,
						"acc_stderr,none": 0.01399748185593381,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.5621706309642501,
						"acc_stderr,none": 0.13001464375283633,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5777777777777777,
						"acc_stderr,none": 0.04266763404099582,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5592105263157895,
						"acc_stderr,none": 0.04040311062490437,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6264150943396226,
						"acc_stderr,none": 0.02977308271331987,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6458333333333334,
						"acc_stderr,none": 0.039994111357535424,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.05,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5722543352601156,
						"acc_stderr,none": 0.03772446857518026,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.04835503696107223,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4553191489361702,
						"acc_stderr,none": 0.03255525359340354,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.04489539350270697,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5586206896551724,
						"acc_stderr,none": 0.04137931034482757,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3386243386243386,
						"acc_stderr,none": 0.024373197867983053,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3412698412698413,
						"acc_stderr,none": 0.04240799327574924,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.7032258064516129,
						"acc_stderr,none": 0.025988500792411898,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.4630541871921182,
						"acc_stderr,none": 0.035083705204426656,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7272727272727273,
						"acc_stderr,none": 0.03477691162163659,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7424242424242424,
						"acc_stderr,none": 0.03115626951964683,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7823834196891192,
						"acc_stderr,none": 0.029778663037752943,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5794871794871795,
						"acc_stderr,none": 0.025028610276710855,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.027634907264178544,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.5840336134453782,
						"acc_stderr,none": 0.03201650100739611,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.33112582781456956,
						"acc_stderr,none": 0.038425817186598696,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7724770642201835,
						"acc_stderr,none": 0.017974463578776502,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.46296296296296297,
						"acc_stderr,none": 0.03400603625538271,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7254901960784313,
						"acc_stderr,none": 0.031321798030832904,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7426160337552743,
						"acc_stderr,none": 0.028458820991460295,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6636771300448431,
						"acc_stderr,none": 0.031708824268455,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6564885496183206,
						"acc_stderr,none": 0.041649760719448786,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5126461211477152,
						"acc_stderr,none": 0.14336172282320195,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.71900826446281,
						"acc_stderr,none": 0.04103203830514512,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7037037037037037,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.6687116564417178,
						"acc_stderr,none": 0.03697983910025588,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.04708567521880525,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6990291262135923,
						"acc_stderr,none": 0.04541609446503948,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.811965811965812,
						"acc_stderr,none": 0.02559819368665224,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7432950191570882,
						"acc_stderr,none": 0.015620480263064528,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.615606936416185,
						"acc_stderr,none": 0.026189666966272035,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6339869281045751,
						"acc_stderr,none": 0.02758281141515962,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6337302864499517,
						"acc_stderr,none": 0.10048153423166517,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.6366559485530546,
						"acc_stderr,none": 0.027316847674192714,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.6450617283950617,
						"acc_stderr,none": 0.026624152478845853,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.41843971631205673,
						"acc_stderr,none": 0.02942799403941999,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.4426336375488918,
						"acc_stderr,none": 0.012685906538206244,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.5882352941176471,
						"acc_stderr,none": 0.029896163033125474,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5751633986928104,
						"acc_stderr,none": 0.01999797303545834,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6363636363636364,
						"acc_stderr,none": 0.04607582090719976,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.6081632653061224,
						"acc_stderr,none": 0.031251275910891656,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6600584985375366,
						"acc_stderr,none": 0.09437634218056394,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.845771144278607,
						"acc_stderr,none": 0.02553843336857833,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.47002854424357754,
						"acc_stderr,none": 0.11529740884949935,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.03885425420866767,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.8011695906432749,
						"acc_stderr,none": 0.030611116557432528,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.8053998981151299,
						"acc_stderr,none": 0.0039962650974490616,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7937347436940602,
						"acc_stderr,none": 0.004080861802769054,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6936274509803921,
						"acc_stderr,none": 0.02285024477026493,
						"alias": "mrpc",
						"f1,none": 0.8164464023494861,
						"f1_stderr,none": 0.016114595032901035
					},
					"multimedqa": {
						"acc,none": 0.49325762952448543,
						"acc_norm,none": 0.45377947222593956,
						"acc_norm_stderr,none": 0.00013529425302545396,
						"acc_stderr,none": 0.06287909434422387,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5552805280528053,
						"acc_stderr,none": 0.007137773869165738,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7178329586579084,
						"mrr_stderr,none": 0.010260012039863414,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41309255079006774,
						"r@2_stderr,none": 0.01655148090296311
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6586719354442226,
						"mrr_stderr,none": 0.01040353373478914,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4582392776523702,
						"r@2_stderr,none": 0.016748591038439245
					},
					"openbookqa": {
						"acc,none": 0.342,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.022303966774269945,
						"acc_stderr,none": 0.021236147199899257,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4105,
						"acc_stderr,none": 0.011002518016406625,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3325,
						"acc_stderr,none": 0.01053695348259386,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3585,
						"acc_stderr,none": 0.010725968403790009,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.01114361207351664,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5405,
						"acc_stderr,none": 0.011146389370464352,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.448,
						"acc_stderr,none": 0.011122493197456278,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.011082279027990147,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.43785714285714283,
						"acc_stderr,none": 0.06032802635145791,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.8030467899891186,
						"acc_norm,none": 0.8079434167573449,
						"acc_norm_stderr,none": 0.009190740295126475,
						"acc_stderr,none": 0.00927891889800638,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.26665243381725023,
						"acc_norm,none": 0.2778074295473954,
						"acc_norm_stderr,none": 0.003272439208592791,
						"acc_stderr,none": 0.0032307314155471797,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628036,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7857064973692255,
						"acc_norm,none": 0.6759789873550113,
						"acc_norm_stderr,none": 0.00922296264668544,
						"acc_stderr,none": 0.13904160115792802,
						"alias": "pythia",
						"bits_per_byte,none": 0.6044402430736675,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5203887371777813,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2.975408349533753,
						"perplexity_stderr,none": 0.053701156407645174,
						"word_perplexity,none": 9.397091941429304,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.35815602836879434,
						"acc_norm,none": 0.40070921985815605,
						"acc_norm_stderr,none": 0.057445725297582795,
						"acc_stderr,none": 0.04074111048823162,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.04583492485141056,
						"acc_stderr,none": 0.04490887131390718,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.41875,
						"acc_norm_stderr,none": 0.039125538756915115,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3626760563380282,
						"acc_norm,none": 0.3485915492957746,
						"acc_norm_stderr,none": 0.028326433924036703,
						"acc_stderr,none": 0.028578954826942813,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.00676501598687746,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.603512243383626,
						"acc_stderr,none": 0.0024328281556836176,
						"alias": "qqp",
						"f1,none": 0.6445361007628171,
						"f1_stderr,none": 0.0026296539187985924
					},
					"race": {
						"acc,none": 0.35119617224880384,
						"acc_stderr,none": 0.014773430019036974,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2699,
						"em_stderr,none": 0.0044392983383605205,
						"f1,none": 0.2801833335906267,
						"f1_stderr,none": 0.004451155922785669
					},
					"rte": {
						"acc,none": 0.7581227436823105,
						"acc_stderr,none": 0.025775834739144625,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.939,
						"acc_norm,none": 0.936,
						"acc_norm_stderr,none": 0.007743640226919288,
						"acc_stderr,none": 0.007572076091557418,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.7581227436823105,
						"acc_stderr,none": 0.025775834739144625,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.698394495412844,
						"acc_stderr,none": 0.015551094415874421,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5931220633809857,
						"acc_norm,none": 0.7854643606917925,
						"acc_norm_stderr,none": 0.002902309268318626,
						"acc_stderr,none": 0.0034732403049643843,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.865661708429004,
						"acc_stderr,none": 0.06824739752554036,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.9450120192307693,
						"acc_stderr,none": 0.002281508108409556,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9657444005270093,
						"acc_stderr,none": 0.0018311601553299888,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.6911764705882353,
						"acc_stderr,none": 0.004574786888516813,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3649662990739864,
						"acc_stderr,none": 0.0013719614766716114,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.40024479804161567,
						"bleu_acc_stderr,none": 0.017151605555749138,
						"bleu_diff,none": -3.7574288715127366,
						"bleu_diff_stderr,none": 0.8900981650904477,
						"bleu_max,none": 29.434185881090134,
						"bleu_max_stderr,none": 0.8185610204170272,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.017087795881769636,
						"rouge1_diff,none": -4.933360551806529,
						"rouge1_diff_stderr,none": 0.9697129576394805,
						"rouge1_max,none": 55.8658926752527,
						"rouge1_max_stderr,none": 0.8275214809853042,
						"rouge2_acc,none": 0.33047735618115054,
						"rouge2_acc_stderr,none": 0.016466769613698303,
						"rouge2_diff,none": -6.361848550974198,
						"rouge2_diff_stderr,none": 1.1761054306532206,
						"rouge2_max,none": 40.22209187835054,
						"rouge2_max_stderr,none": 1.0131925597036415,
						"rougeL_acc,none": 0.386780905752754,
						"rougeL_acc_stderr,none": 0.017048857010515107,
						"rougeL_diff,none": -4.992825170607286,
						"rougeL_diff_stderr,none": 0.9914374728544091,
						"rougeL_max,none": 52.78305385104008,
						"rougeL_max_stderr,none": 0.8481490715097342
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.40024479804161567,
						"bleu_acc_stderr,none": 0.017151605555749138,
						"bleu_diff,none": -3.7574288715127366,
						"bleu_diff_stderr,none": 0.8900981650904477,
						"bleu_max,none": 29.434185881090134,
						"bleu_max_stderr,none": 0.8185610204170272,
						"rouge1_acc,none": 0.39167686658506734,
						"rouge1_acc_stderr,none": 0.017087795881769636,
						"rouge1_diff,none": -4.933360551806529,
						"rouge1_diff_stderr,none": 0.9697129576394805,
						"rouge1_max,none": 55.8658926752527,
						"rouge1_max_stderr,none": 0.8275214809853042,
						"rouge2_acc,none": 0.33047735618115054,
						"rouge2_acc_stderr,none": 0.016466769613698303,
						"rouge2_diff,none": -6.361848550974198,
						"rouge2_diff_stderr,none": 1.1761054306532206,
						"rouge2_max,none": 40.22209187835054,
						"rouge2_max_stderr,none": 1.0131925597036415,
						"rougeL_acc,none": 0.386780905752754,
						"rougeL_acc_stderr,none": 0.017048857010515107,
						"rougeL_diff,none": -4.992825170607286,
						"rougeL_diff_stderr,none": 0.9914374728544091,
						"rougeL_max,none": 52.78305385104008,
						"rougeL_max_stderr,none": 0.8481490715097342
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2974296205630355,
						"acc_stderr,none": 0.016002651487360995,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.43250297758493733,
						"acc_stderr,none": 0.014356987746923034,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.012303149606299213,
						"exact_match_stderr,none": 0.0024460482822194203
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6044402430736675,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5203887371777813,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.397091941429304,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7426992896606156,
						"acc_stderr,none": 0.012285989618865708,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.059755502635482904,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8681318681318682,
						"acc_stderr,none": 0.020515321360773598,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6447272727272727,
						"acc_stderr,none": 0.078655925202766,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.02168382753928612,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.01973288558592209,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.018544211375820324,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.021854684955611263,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.021144791425048846,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.019226734893614598,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.02001121929807353,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.445140562248996,
						"acc_stderr,none": 0.05170020859669855,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463621,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4775100401606426,
						"acc_stderr,none": 0.010011929439394012,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.5004016064257029,
						"acc_stderr,none": 0.010022069634353856,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40602409638554215,
						"acc_stderr,none": 0.009843462007384216,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5393574297188755,
						"acc_stderr,none": 0.009990976095711881,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.493574297188755,
						"acc_stderr,none": 0.010021245217159398,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5100401606425703,
						"acc_stderr,none": 0.010020052116889137,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.44497991967871486,
						"acc_stderr,none": 0.009961210239024635,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4947791164658635,
						"acc_stderr,none": 0.010021526496530347,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.41445783132530123,
						"acc_stderr,none": 0.009874311310483544,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3895582329317269,
						"acc_stderr,none": 0.00977452959078366,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.01000387141951773,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.43092369477911646,
						"acc_stderr,none": 0.009925970741520651,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.42730923694779116,
						"acc_stderr,none": 0.009915595034908124,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.00952295446980604,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.658684796341977,
						"acc_stderr,none": 0.059368537675491516,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6399735274652548,
						"acc_stderr,none": 0.012352638981498536,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7935142289874255,
						"acc_stderr,none": 0.010416790997712047,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7405691594970218,
						"acc_stderr,none": 0.011279897124457372,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5929847782925215,
						"acc_stderr,none": 0.012642664836816928,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6432825943084051,
						"acc_stderr,none": 0.01232748767711036,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6922567835870285,
						"acc_stderr,none": 0.01187789223516454,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.012743443034698407,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.7174056915949703,
						"acc_stderr,none": 0.011587123627044827,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5691594970218399,
						"acc_stderr,none": 0.01274344303469841,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.6101919258769027,
						"acc_stderr,none": 0.012550764190647013,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6770350761085374,
						"acc_stderr,none": 0.012033578346967668,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8356934142503933,
						"acc_stderr,none": 0.03541716419371988,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8903225806451613,
						"acc_stderr,none": 0.0064820778685025105,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7469879518072289,
						"acc_stderr,none": 0.048008758304372776,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7716371220020855,
						"acc_stderr,none": 0.013562400205050158,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7870722433460076,
						"acc_stderr,none": 0.025291395445662845,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.7015873015873015,
						"acc_stderr,none": 0.025821691360354258,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8293650793650794,
						"acc_stderr,none": 0.016773466959061005,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "m8than/Finch-14B-Final"
	},
	"m8than/Finch-14B-Final2": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6710822998872604,
						"acc_norm,none": 0.673337091319053,
						"acc_norm_stderr,none": 0.0842696561687326,
						"acc_stderr,none": 0.09834458192072633,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.52875,
						"acc_stderr,none": 0.05343419822726108,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8475522388059702,
						"acc_stderr,none": 0.1359974598208937,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.4649456052495251,
						"acc_norm,none": 0.4649456052495251,
						"acc_norm_stderr,none": 0.10208795541917443,
						"acc_stderr,none": 0.10208795541917443,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6639917818008576,
						"acc_stderr,none": 0.00910105549100269,
						"alias": "glue",
						"f1,none": 0.6459843641578645,
						"f1_stderr,none": 0.0002762982658319697,
						"mcc,none": 0.15191153194078577,
						"mcc_stderr,none": 0.033425150533706614
					},
					"lambada": {
						"acc,none": 0.7279254803027363,
						"acc_stderr,none": 0.016352526740605534,
						"alias": "lambada",
						"perplexity,none": 3.2499198564639857,
						"perplexity_stderr,none": 0.13915345161118498
					},
					"lambada_multilingual": {
						"acc,none": 0.5705026198331069,
						"acc_stderr,none": 0.07935665046551688,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.169373730394376,
						"perplexity_stderr,none": 6.322488097039559
					},
					"mmlu": {
						"acc,none": 0.560461472724683,
						"acc_stderr,none": 0.13217735852620205,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5086078639744952,
						"acc_stderr,none": 0.14622754639234883,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6266495011264885,
						"acc_stderr,none": 0.10678706055887757,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6626584335391615,
						"acc_stderr,none": 0.09958788043918582,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.47288296860133205,
						"acc_stderr,none": 0.11294065176078537,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.4322857142857143,
						"acc_stderr,none": 0.060629863136872464,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7875356171897289,
						"acc_norm,none": 0.676339223864274,
						"acc_norm_stderr,none": 0.008969816217225295,
						"acc_stderr,none": 0.13903219518493298,
						"alias": "pythia",
						"bits_per_byte,none": 0.6094015345394527,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5256262103243146,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.001806138591158,
						"perplexity_stderr,none": 0.0548083995780737,
						"word_perplexity,none": 9.571497284071752,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.3927196337205575,
						"acc_stderr,none": 0.001539526203967856,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.44063647490820074,
						"bleu_acc_stderr,none": 0.01737969755543745,
						"bleu_diff,none": -1.2533518346355572,
						"bleu_diff_stderr,none": 0.8689505310271912,
						"bleu_max,none": 28.341628601169912,
						"bleu_max_stderr,none": 0.8237729833003474,
						"rouge1_acc,none": 0.45777233782129745,
						"rouge1_acc_stderr,none": 0.01744096571248212,
						"rouge1_diff,none": -0.8908174310569977,
						"rouge1_diff_stderr,none": 1.0247086367622387,
						"rouge1_max,none": 54.90593082611275,
						"rouge1_max_stderr,none": 0.8355289806404766,
						"rouge2_acc,none": 0.3463892288861689,
						"rouge2_acc_stderr,none": 0.01665699710912513,
						"rouge2_diff,none": -3.2631442564857225,
						"rouge2_diff_stderr,none": 1.194783861109434,
						"rouge2_max,none": 38.68966059347235,
						"rouge2_max_stderr,none": 1.0413854566237415,
						"rougeL_acc,none": 0.44063647490820074,
						"rougeL_acc_stderr,none": 0.01737969755543745,
						"rougeL_diff,none": -1.081620037683707,
						"rougeL_diff_stderr,none": 1.0438889153268645,
						"rougeL_max,none": 51.884226949620064,
						"rougeL_max_stderr,none": 0.8556442714889916
					},
					"xcopa": {
						"acc,none": 0.647090909090909,
						"acc_stderr,none": 0.07959652641172524,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4403748326639893,
						"acc_stderr,none": 0.04728209406058379,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6612718849648035,
						"acc_stderr,none": 0.059112793830991224,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8426612721959991,
						"acc_stderr,none": 0.03507169622463686,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6710822998872604,
						"acc_norm,none": 0.673337091319053,
						"acc_norm_stderr,none": 0.0842696561687326,
						"acc_stderr,none": 0.09834458192072633,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.52875,
						"acc_stderr,none": 0.05343419822726108,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.015204840912919501,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.015768596914394382,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.495,
						"acc_stderr,none": 0.014439052549669151,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.46331058020477817,
						"acc_norm,none": 0.49573378839590443,
						"acc_norm_stderr,none": 0.014610858923956952,
						"acc_stderr,none": 0.01457200052775699,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7735690235690236,
						"acc_norm,none": 0.7609427609427609,
						"acc_norm_stderr,none": 0.00875175472358042,
						"acc_stderr,none": 0.008587873686799268,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8475522388059702,
						"acc_stderr,none": 0.1359974598208937,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996685,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045074,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298176,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315145,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.796,
						"acc_stderr,none": 0.012749374359024386,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.629,
						"acc_stderr,none": 0.015283736211823187,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659964,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340983,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.00244335219932982,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036368,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380718,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426128,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.00521950603441004,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817145,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323488,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767836,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.00581453427273493,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103319,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085337,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659992,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695447,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098701,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000098,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798596,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816315,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.727,
						"acc_stderr,none": 0.014095022868717593,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341681,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849883,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333364,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639233,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.00669595667816304,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477343,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525047,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042762,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.684,
						"acc_stderr,none": 0.014709193056057134,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.716,
						"acc_stderr,none": 0.014267009061031306,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651535,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386702,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651523,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.789,
						"acc_stderr,none": 0.012909130321042095,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426114,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024067,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729488,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.01444273494157502,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756966,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697061,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000065,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348632,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814902,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481739,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462122,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.0093636893732481,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244073,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.741,
						"acc_stderr,none": 0.013860415257527911,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397332,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.00682976175614092,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.0073351758537068415,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036446,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319419,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.442,
						"acc_stderr,none": 0.015712507211864207,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498322,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.4649456052495251,
						"acc_norm,none": 0.4649456052495251,
						"acc_norm_stderr,none": 0.10208795541917443,
						"acc_stderr,none": 0.10208795541917443,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.4319526627218935,
						"acc_norm,none": 0.4319526627218935,
						"acc_norm_stderr,none": 0.03821692157382019,
						"acc_stderr,none": 0.03821692157382019,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.037698374558241474,
						"acc_stderr,none": 0.037698374558241474,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.3048780487804878,
						"acc_norm,none": 0.3048780487804878,
						"acc_norm_stderr,none": 0.03605784583600454,
						"acc_stderr,none": 0.03605784583600454,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.60625,
						"acc_norm,none": 0.60625,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.036810508691615486,
						"acc_stderr,none": 0.036810508691615486,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.5119617224880383,
						"acc_norm,none": 0.5119617224880383,
						"acc_norm_stderr,none": 0.03465883983665193,
						"acc_stderr,none": 0.03465883983665193,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.40625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03894932504400619,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.5801526717557252,
						"acc_norm,none": 0.5801526717557252,
						"acc_norm_stderr,none": 0.0432857721526297,
						"acc_stderr,none": 0.0432857721526297,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.39705882352941174,
						"acc_norm,none": 0.39705882352941174,
						"acc_norm_stderr,none": 0.04211123842140102,
						"acc_stderr,none": 0.04211123842140102,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.5233644859813084,
						"acc_norm,none": 0.5233644859813084,
						"acc_norm_stderr,none": 0.04851124172329673,
						"acc_stderr,none": 0.04851124172329673,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.56656346749226,
						"acc_norm,none": 0.56656346749226,
						"acc_norm_stderr,none": 0.027615893839097752,
						"acc_stderr,none": 0.027615893839097752,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3627450980392157,
						"acc_norm,none": 0.3627450980392157,
						"acc_norm_stderr,none": 0.03374499356319354,
						"acc_stderr,none": 0.03374499356319354,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.553072625698324,
						"acc_norm,none": 0.553072625698324,
						"acc_norm_stderr,none": 0.03726486555057904,
						"acc_stderr,none": 0.03726486555057904,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.42616033755274263,
						"acc_norm,none": 0.42616033755274263,
						"acc_norm_stderr,none": 0.03219035703131774,
						"acc_stderr,none": 0.03219035703131774,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.616822429906542,
						"acc_norm,none": 0.616822429906542,
						"acc_norm_stderr,none": 0.04722013080771233,
						"acc_stderr,none": 0.04722013080771233,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.42452830188679247,
						"acc_norm,none": 0.42452830188679247,
						"acc_norm_stderr,none": 0.04823593037243471,
						"acc_stderr,none": 0.04823593037243471,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.35185185185185186,
						"acc_norm,none": 0.35185185185185186,
						"acc_norm_stderr,none": 0.04616631111801715,
						"acc_stderr,none": 0.04616631111801715,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.042324735320550415,
						"acc_stderr,none": 0.042324735320550415,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.42452830188679247,
						"acc_norm,none": 0.42452830188679247,
						"acc_norm_stderr,none": 0.04823593037243471,
						"acc_stderr,none": 0.04823593037243471,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.4725274725274725,
						"acc_norm,none": 0.4725274725274725,
						"acc_norm_stderr,none": 0.030271155718081682,
						"acc_stderr,none": 0.030271155718081682,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5049019607843137,
						"acc_norm,none": 0.5049019607843137,
						"acc_norm_stderr,none": 0.03509143375606786,
						"acc_stderr,none": 0.03509143375606786,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.5672514619883041,
						"acc_norm,none": 0.5672514619883041,
						"acc_norm_stderr,none": 0.03799978644370606,
						"acc_stderr,none": 0.03799978644370606,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.5170068027210885,
						"acc_norm,none": 0.5170068027210885,
						"acc_norm_stderr,none": 0.041356350546877384,
						"acc_stderr,none": 0.041356350546877384,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.33093525179856115,
						"acc_norm,none": 0.33093525179856115,
						"acc_norm_stderr,none": 0.0400558587253958,
						"acc_stderr,none": 0.0400558587253958,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.4779874213836478,
						"acc_norm,none": 0.4779874213836478,
						"acc_norm_stderr,none": 0.03973929649561242,
						"acc_stderr,none": 0.03973929649561242,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.5460122699386503,
						"acc_norm,none": 0.5460122699386503,
						"acc_norm_stderr,none": 0.0391170190467718,
						"acc_stderr,none": 0.0391170190467718,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.37790697674418605,
						"acc_norm,none": 0.37790697674418605,
						"acc_norm_stderr,none": 0.037078492187232796,
						"acc_stderr,none": 0.037078492187232796,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.43253968253968256,
						"acc_norm,none": 0.43253968253968256,
						"acc_norm_stderr,none": 0.0312711509660525,
						"acc_stderr,none": 0.0312711509660525,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.035476014940069384,
						"acc_stderr,none": 0.035476014940069384,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6596638655462185,
						"acc_norm,none": 0.6596638655462185,
						"acc_norm_stderr,none": 0.03077805742293167,
						"acc_stderr,none": 0.03077805742293167,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.3565217391304348,
						"acc_norm,none": 0.3565217391304348,
						"acc_norm_stderr,none": 0.031651347692206504,
						"acc_stderr,none": 0.031651347692206504,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.4148148148148148,
						"acc_norm,none": 0.4148148148148148,
						"acc_norm_stderr,none": 0.04256193767901407,
						"acc_stderr,none": 0.04256193767901407,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.46153846153846156,
						"acc_norm,none": 0.46153846153846156,
						"acc_norm_stderr,none": 0.041834744477373405,
						"acc_stderr,none": 0.041834744477373405,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.4772727272727273,
						"acc_norm,none": 0.4772727272727273,
						"acc_norm_stderr,none": 0.037757381195082186,
						"acc_stderr,none": 0.037757381195082186,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.5100671140939598,
						"acc_norm,none": 0.5100671140939598,
						"acc_norm_stderr,none": 0.04109141532737571,
						"acc_stderr,none": 0.04109141532737571,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.4260355029585799,
						"acc_norm,none": 0.4260355029585799,
						"acc_norm_stderr,none": 0.03815142551613447,
						"acc_stderr,none": 0.03815142551613447,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.04069455660284017,
						"acc_stderr,none": 0.04069455660284017,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.5254237288135594,
						"acc_norm,none": 0.5254237288135594,
						"acc_norm_stderr,none": 0.04616522112086746,
						"acc_stderr,none": 0.04616522112086746,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.38181818181818183,
						"acc_norm,none": 0.38181818181818183,
						"acc_norm_stderr,none": 0.04653429807913508,
						"acc_stderr,none": 0.04653429807913508,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.5174825174825175,
						"acc_norm,none": 0.5174825174825175,
						"acc_norm_stderr,none": 0.04193341146460268,
						"acc_stderr,none": 0.04193341146460268,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.4603174603174603,
						"acc_norm,none": 0.4603174603174603,
						"acc_norm_stderr,none": 0.04458029125470973,
						"acc_stderr,none": 0.04458029125470973,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3675675675675676,
						"acc_norm,none": 0.3675675675675676,
						"acc_norm_stderr,none": 0.03554403659088363,
						"acc_stderr,none": 0.03554403659088363,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5232558139534884,
						"acc_norm,none": 0.5232558139534884,
						"acc_norm_stderr,none": 0.03819457472859222,
						"acc_stderr,none": 0.03819457472859222,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.46472019464720193,
						"acc_norm,none": 0.46472019464720193,
						"acc_norm_stderr,none": 0.024631693609729385,
						"acc_stderr,none": 0.024631693609729385,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7897196261682243,
						"acc_norm,none": 0.7897196261682243,
						"acc_norm_stderr,none": 0.027921968584579328,
						"acc_stderr,none": 0.027921968584579328,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4959349593495935,
						"acc_norm,none": 0.4959349593495935,
						"acc_norm_stderr,none": 0.04526637693357743,
						"acc_stderr,none": 0.04526637693357743,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.45081967213114754,
						"acc_norm,none": 0.45081967213114754,
						"acc_norm_stderr,none": 0.045234128795160075,
						"acc_stderr,none": 0.045234128795160075,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5285714285714286,
						"acc_norm,none": 0.5285714285714286,
						"acc_norm_stderr,none": 0.03452921053595503,
						"acc_stderr,none": 0.03452921053595503,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5166666666666667,
						"acc_norm,none": 0.5166666666666667,
						"acc_norm_stderr,none": 0.03735098678123468,
						"acc_stderr,none": 0.03735098678123468,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5925925925925926,
						"acc_norm,none": 0.5925925925925926,
						"acc_norm_stderr,none": 0.03583551458125162,
						"acc_stderr,none": 0.03583551458125162,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.4051724137931034,
						"acc_norm,none": 0.4051724137931034,
						"acc_norm_stderr,none": 0.045779027749488756,
						"acc_stderr,none": 0.045779027749488756,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.47586206896551725,
						"acc_norm,none": 0.47586206896551725,
						"acc_norm_stderr,none": 0.04161808503501528,
						"acc_stderr,none": 0.04161808503501528,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.5428571428571428,
						"acc_norm,none": 0.5428571428571428,
						"acc_norm_stderr,none": 0.04884859510086342,
						"acc_stderr,none": 0.04884859510086342,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.4857142857142857,
						"acc_norm,none": 0.4857142857142857,
						"acc_norm_stderr,none": 0.037889427631585065,
						"acc_stderr,none": 0.037889427631585065,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.36018957345971564,
						"acc_norm,none": 0.36018957345971564,
						"acc_norm_stderr,none": 0.03312695957406923,
						"acc_stderr,none": 0.03312695957406923,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.3377659574468085,
						"acc_norm,none": 0.3377659574468085,
						"acc_norm_stderr,none": 0.02442294633805669,
						"acc_stderr,none": 0.02442294633805669,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03289758474798845,
						"acc_stderr,none": 0.03289758474798845,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4885057471264368,
						"acc_norm,none": 0.4885057471264368,
						"acc_norm_stderr,none": 0.03800425000198232,
						"acc_stderr,none": 0.03800425000198232,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.43703703703703706,
						"acc_norm,none": 0.43703703703703706,
						"acc_norm_stderr,none": 0.04284958639753399,
						"acc_stderr,none": 0.04284958639753399,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.4911504424778761,
						"acc_norm,none": 0.4911504424778761,
						"acc_norm_stderr,none": 0.03332811194650094,
						"acc_stderr,none": 0.03332811194650094,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.4727272727272727,
						"acc_norm,none": 0.4727272727272727,
						"acc_norm_stderr,none": 0.03898531605579419,
						"acc_stderr,none": 0.03898531605579419,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3567567567567568,
						"acc_norm,none": 0.3567567567567568,
						"acc_norm_stderr,none": 0.035315455206482514,
						"acc_stderr,none": 0.035315455206482514,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.5443786982248521,
						"acc_norm,none": 0.5443786982248521,
						"acc_norm_stderr,none": 0.038423589228359284,
						"acc_stderr,none": 0.038423589228359284,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.6273291925465838,
						"acc_norm,none": 0.6273291925465838,
						"acc_norm_stderr,none": 0.03822525970525206,
						"acc_stderr,none": 0.03822525970525206,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.54375,
						"acc_norm,none": 0.54375,
						"acc_norm_stderr,none": 0.039500492593059405,
						"acc_stderr,none": 0.039500492593059405,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.15191153194078577,
						"mcc_stderr,none": 0.033425150533706614
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.034873508801977704,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6639917818008576,
						"acc_stderr,none": 0.00910105549100269,
						"alias": "glue",
						"f1,none": 0.6459843641578645,
						"f1_stderr,none": 0.0002762982658319697,
						"mcc,none": 0.15191153194078577,
						"mcc_stderr,none": 0.033425150533706614
					},
					"hellaswag": {
						"acc,none": 0.5768771161123282,
						"acc_norm,none": 0.7648874726150169,
						"acc_norm_stderr,none": 0.004232024522115296,
						"acc_stderr,none": 0.004930448527146673,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7279254803027363,
						"acc_stderr,none": 0.016352526740605534,
						"alias": "lambada",
						"perplexity,none": 3.2499198564639857,
						"perplexity_stderr,none": 0.13915345161118498
					},
					"lambada_multilingual": {
						"acc,none": 0.5705026198331069,
						"acc_stderr,none": 0.07935665046551688,
						"alias": "lambada_multilingual",
						"perplexity,none": 16.169373730394376,
						"perplexity_stderr,none": 6.322488097039559
					},
					"lambada_openai": {
						"acc,none": 0.759169415874248,
						"acc_stderr,none": 0.005957132284367888,
						"alias": " - lambada_openai",
						"perplexity,none": 3.001806138591158,
						"perplexity_stderr,none": 0.0548083995780737
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4589559479914613,
						"acc_stderr,none": 0.006942468015741764,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 27.213026200762556,
						"perplexity_stderr,none": 1.4867757934202306
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7576169221812536,
						"acc_stderr,none": 0.005970188644154153,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 2.9999165682619666,
						"perplexity_stderr,none": 0.05477772562618202
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.48845332815835435,
						"acc_stderr,none": 0.00696411992274735,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 21.630412618047156,
						"perplexity_stderr,none": 1.0339953094729808
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5901416650494857,
						"acc_stderr,none": 0.00685183817515505,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 12.7001010026148,
						"perplexity_stderr,none": 0.5995570948205043
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5573452357849796,
						"acc_stderr,none": 0.006920011095249945,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 16.30341226228541,
						"perplexity_stderr,none": 0.8467650753430807
					},
					"lambada_standard": {
						"acc,none": 0.697651853289346,
						"acc_stderr,none": 0.006398602102697942,
						"alias": " - lambada_standard",
						"perplexity,none": 3.499898964516897,
						"perplexity_stderr,none": 0.06693046629134702
					},
					"logiqa": {
						"acc,none": 0.25499231950844853,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399508,
						"acc_stderr,none": 0.01709571410527982,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.560461472724683,
						"acc_stderr,none": 0.13217735852620205,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5259259259259259,
						"acc_stderr,none": 0.04313531696750575,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5789473684210527,
						"acc_stderr,none": 0.04017901275981749,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6150943396226415,
						"acc_stderr,none": 0.02994649856769995,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6527777777777778,
						"acc_stderr,none": 0.03981240543717861,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0498887651569859,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.04793724854411019,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5780346820809249,
						"acc_stderr,none": 0.0376574669386515,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.43137254901960786,
						"acc_stderr,none": 0.04928099597287534,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.4425531914893617,
						"acc_stderr,none": 0.03246956919789958,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.044346007015849245,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.503448275862069,
						"acc_stderr,none": 0.041665675771015785,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3306878306878307,
						"acc_stderr,none": 0.024229965298425086,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.36507936507936506,
						"acc_stderr,none": 0.043062412591271526,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.026069362295335134,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.4236453201970443,
						"acc_stderr,none": 0.03476725747649037,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.04999999999999999,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7090909090909091,
						"acc_stderr,none": 0.03546563019624335,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7626262626262627,
						"acc_stderr,none": 0.030313710538198896,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7772020725388601,
						"acc_stderr,none": 0.03003114797764154,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5820512820512821,
						"acc_stderr,none": 0.025007329882461213,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.31851851851851853,
						"acc_stderr,none": 0.02840653309060846,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.5756302521008403,
						"acc_stderr,none": 0.032104790510157764,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526733,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7889908256880734,
						"acc_stderr,none": 0.01749392240411265,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.5462962962962963,
						"acc_stderr,none": 0.033953227263757976,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.03096451792692341,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7932489451476793,
						"acc_stderr,none": 0.026361651668389094,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6457399103139013,
						"acc_stderr,none": 0.03210062154134987,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6564885496183206,
						"acc_stderr,none": 0.041649760719448786,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5086078639744952,
						"acc_stderr,none": 0.14622754639234883,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6859504132231405,
						"acc_stderr,none": 0.042369647530410184,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7222222222222222,
						"acc_stderr,none": 0.04330043749650742,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.656441717791411,
						"acc_stderr,none": 0.03731133519673893,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.04745789978762494,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.7281553398058253,
						"acc_stderr,none": 0.044052680241409216,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8076923076923077,
						"acc_stderr,none": 0.025819233256483724,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7407407407407407,
						"acc_stderr,none": 0.01567100600933957,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6127167630057804,
						"acc_stderr,none": 0.02622615860512465,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24022346368715083,
						"acc_stderr,none": 0.014288343803925305,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6143790849673203,
						"acc_stderr,none": 0.02787074527829027,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6266495011264885,
						"acc_stderr,none": 0.10678706055887757,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.6463022508038585,
						"acc_stderr,none": 0.027155208103200875,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5987654320987654,
						"acc_stderr,none": 0.027272582849839792,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.4219858156028369,
						"acc_stderr,none": 0.029462189233370597,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.43415906127770537,
						"acc_stderr,none": 0.01265903323706725,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.5735294117647058,
						"acc_stderr,none": 0.030042615832714867,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5735294117647058,
						"acc_stderr,none": 0.020007912739359368,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.6326530612244898,
						"acc_stderr,none": 0.030862144921087555,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6626584335391615,
						"acc_stderr,none": 0.09958788043918582,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.8208955223880597,
						"acc_stderr,none": 0.027113286753111837,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.47288296860133205,
						"acc_stderr,none": 0.11294065176078537,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.03775251680686371,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.038899512528272166,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.7953216374269005,
						"acc_stderr,none": 0.030944459778533204,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.8343352012226184,
						"acc_stderr,none": 0.0037528601038379914,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.8311635475996745,
						"acc_stderr,none": 0.0037781352873794936,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7083333333333334,
						"acc_stderr,none": 0.02253019934687401,
						"alias": " - mrpc",
						"f1,none": 0.8231797919762258,
						"f1_stderr,none": 0.016000145387874306
					},
					"openbookqa": {
						"acc,none": 0.342,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.022296238348407056,
						"acc_stderr,none": 0.02123614719989926,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3965,
						"acc_stderr,none": 0.010940919836258157,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3265,
						"acc_stderr,none": 0.010488273305862498,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3585,
						"acc_stderr,none": 0.010725968403790009,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5365,
						"acc_stderr,none": 0.01115329875133434,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.011150792352341666,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.434,
						"acc_stderr,none": 0.011085280407858918,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.011091145421162655,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4322857142857143,
						"acc_stderr,none": 0.060629863136872464,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.8003264417845484,
						"acc_norm,none": 0.8139281828073993,
						"acc_norm_stderr,none": 0.009079851894097846,
						"acc_stderr,none": 0.009326942154519176,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7875356171897289,
						"acc_norm,none": 0.676339223864274,
						"acc_norm_stderr,none": 0.008969816217225295,
						"acc_stderr,none": 0.13903219518493298,
						"alias": "pythia",
						"bits_per_byte,none": 0.6094015345394527,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5256262103243146,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.001806138591158,
						"perplexity_stderr,none": 0.0548083995780737,
						"word_perplexity,none": 9.571497284071752,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.4946000366099213,
						"acc_stderr,none": 0.006765015986877456,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6020529309918378,
						"acc_stderr,none": 0.002434352655343179,
						"alias": " - qqp",
						"f1,none": 0.644449846412234,
						"f1_stderr,none": 0.0026265422177656423
					},
					"record": {
						"alias": "record",
						"em,none": 0.2666,
						"em_stderr,none": 0.004422035666969035,
						"f1,none": 0.2756199999999998,
						"f1_stderr,none": 0.004432690904754269
					},
					"rte": {
						"acc,none": 0.7111913357400722,
						"acc_stderr,none": 0.027279964226856374,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.935,
						"acc_norm,none": 0.931,
						"acc_norm_stderr,none": 0.008018934050315145,
						"acc_stderr,none": 0.007799733061832032,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.7729357798165137,
						"acc_stderr,none": 0.014195051776717411,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.3927196337205575,
						"acc_stderr,none": 0.001539526203967856,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.44063647490820074,
						"bleu_acc_stderr,none": 0.01737969755543745,
						"bleu_diff,none": -1.2533518346355572,
						"bleu_diff_stderr,none": 0.8689505310271912,
						"bleu_max,none": 28.341628601169912,
						"bleu_max_stderr,none": 0.8237729833003474,
						"rouge1_acc,none": 0.45777233782129745,
						"rouge1_acc_stderr,none": 0.01744096571248212,
						"rouge1_diff,none": -0.8908174310569977,
						"rouge1_diff_stderr,none": 1.0247086367622387,
						"rouge1_max,none": 54.90593082611275,
						"rouge1_max_stderr,none": 0.8355289806404766,
						"rouge2_acc,none": 0.3463892288861689,
						"rouge2_acc_stderr,none": 0.01665699710912513,
						"rouge2_diff,none": -3.2631442564857225,
						"rouge2_diff_stderr,none": 1.194783861109434,
						"rouge2_max,none": 38.68966059347235,
						"rouge2_max_stderr,none": 1.0413854566237415,
						"rougeL_acc,none": 0.44063647490820074,
						"rougeL_acc_stderr,none": 0.01737969755543745,
						"rougeL_diff,none": -1.081620037683707,
						"rougeL_diff_stderr,none": 1.0438889153268645,
						"rougeL_max,none": 51.884226949620064,
						"rougeL_max_stderr,none": 0.8556442714889916
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.44063647490820074,
						"bleu_acc_stderr,none": 0.01737969755543745,
						"bleu_diff,none": -1.2533518346355572,
						"bleu_diff_stderr,none": 0.8689505310271912,
						"bleu_max,none": 28.341628601169912,
						"bleu_max_stderr,none": 0.8237729833003474,
						"rouge1_acc,none": 0.45777233782129745,
						"rouge1_acc_stderr,none": 0.01744096571248212,
						"rouge1_diff,none": -0.8908174310569977,
						"rouge1_diff_stderr,none": 1.0247086367622387,
						"rouge1_max,none": 54.90593082611275,
						"rouge1_max_stderr,none": 0.8355289806404766,
						"rouge2_acc,none": 0.3463892288861689,
						"rouge2_acc_stderr,none": 0.01665699710912513,
						"rouge2_diff,none": -3.2631442564857225,
						"rouge2_diff_stderr,none": 1.194783861109434,
						"rouge2_max,none": 38.68966059347235,
						"rouge2_max_stderr,none": 1.0413854566237415,
						"rougeL_acc,none": 0.44063647490820074,
						"rougeL_acc_stderr,none": 0.01737969755543745,
						"rougeL_diff,none": -1.081620037683707,
						"rougeL_diff_stderr,none": 1.0438889153268645,
						"rougeL_max,none": 51.884226949620064,
						"rougeL_max_stderr,none": 0.8556442714889916
					},
					"truthfulqa_mc1": {
						"acc,none": 0.32068543451652387,
						"acc_stderr,none": 0.016339170373280906,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4647538329245911,
						"acc_stderr,none": 0.014708099039415667,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.6094015345394527,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5256262103243146,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.571497284071752,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.744277821625888,
						"acc_stderr,none": 0.012261253845440474,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.059755502635482904,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.647090909090909,
						"acc_stderr,none": 0.07959652641172524,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.021539170637317695,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747598,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.018359797502387018,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.022122993778135404,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.02206494331392886,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.02111349234774372,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.019384310743640384,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566072,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4403748326639893,
						"acc_stderr,none": 0.04728209406058379,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169662,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4839357429718876,
						"acc_stderr,none": 0.010016898932355682,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.40441767068273093,
						"acc_stderr,none": 0.00983724562545301,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5365461847389559,
						"acc_stderr,none": 0.009995265580368921,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4815261044176707,
						"acc_stderr,none": 0.01001522976835699,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4911646586345382,
						"acc_stderr,none": 0.01002050803376262,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.4248995983935743,
						"acc_stderr,none": 0.0099083775681982,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4887550200803213,
						"acc_stderr,none": 0.01001953797297507,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.42208835341365464,
						"acc_stderr,none": 0.00989965271489543,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.39718875502008033,
						"acc_stderr,none": 0.00980791507067729,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4674698795180723,
						"acc_stderr,none": 0.010000839483876025,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.4469879518072289,
						"acc_stderr,none": 0.009965584062546167,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.009885277727840166,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.00954898064915339,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6612718849648035,
						"acc_stderr,none": 0.059112793830991224,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.6492389146260754,
						"acc_stderr,none": 0.012280597585639617,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.784910655195235,
						"acc_stderr,none": 0.010573799888260045,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7458636664460622,
						"acc_stderr,none": 0.011204039524653531,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5982792852415619,
						"acc_stderr,none": 0.012616114526927904,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6353408338848445,
						"acc_stderr,none": 0.012386781532906168,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.700860357379219,
						"acc_stderr,none": 0.011783227411626317,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5684976836532097,
						"acc_stderr,none": 0.012745810046098411,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.7332892124420913,
						"acc_stderr,none": 0.011380712839734484,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5737921906022502,
						"acc_stderr,none": 0.0127262234506279,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.6048974189278623,
						"acc_stderr,none": 0.01258077297613326,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6790205162144275,
						"acc_stderr,none": 0.012014110213469814,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8426612721959991,
						"acc_stderr,none": 0.03507169622463686,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8980645161290323,
						"acc_stderr,none": 0.0062762235059763095,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7349397590361446,
						"acc_stderr,none": 0.04874064133109369,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7893639207507821,
						"acc_stderr,none": 0.013174139649278138,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.8060836501901141,
						"acc_stderr,none": 0.02442567782008498,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.707936507936508,
						"acc_stderr,none": 0.02566084582577462,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.8095238095238095,
						"acc_stderr,none": 0.017508589845145833,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "m8than/Finch-14B-Final2"
	},
	"m8than/FinchX-Med": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6527621195039459,
						"acc_norm,none": 0.6431792559188275,
						"acc_norm_stderr,none": 0.07938972032889308,
						"acc_stderr,none": 0.10613987157062768,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4625,
						"acc_stderr,none": 0.045266618521850016,
						"alias": "anli"
					},
					"blimp": {
						"acc,none": 0.8227761194029852,
						"acc_stderr,none": 0.13606311499425142,
						"alias": " - blimp"
					},
					"cmmlu": {
						"acc,none": 0.3087549646002418,
						"acc_norm,none": 0.3087549646002418,
						"acc_norm_stderr,none": 0.05788408541131644,
						"acc_stderr,none": 0.05788408541131644,
						"alias": "cmmlu"
					},
					"glue": {
						"acc,none": 0.6985469271081467,
						"acc_stderr,none": 0.0031934574274837552,
						"alias": "glue",
						"f1,none": 0.5101845701962574,
						"f1_stderr,none": 0.0009352752422202518,
						"mcc,none": 0.10240027657242429,
						"mcc_stderr,none": 0.03306591562071735
					},
					"lambada": {
						"acc,none": 0.7200659809819523,
						"acc_stderr,none": 0.016418239205527346,
						"alias": "lambada",
						"perplexity,none": 3.5330865404075995,
						"perplexity_stderr,none": 0.16067638520987035
					},
					"lambada_multilingual": {
						"acc,none": 0.5328158354356686,
						"acc_stderr,none": 0.0878378202197205,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.286600594329414,
						"perplexity_stderr,none": 8.481649253464367
					},
					"mmlu": {
						"acc,none": 0.4392536675687224,
						"acc_stderr,none": 0.09622791212394333,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.40807651434643993,
						"acc_stderr,none": 0.09425961131920245,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.5011264885741873,
						"acc_stderr,none": 0.08199687599600157,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49528761780955477,
						"acc_stderr,none": 0.08736389012968614,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.37012369172216936,
						"acc_stderr,none": 0.08563568173481621,
						"alias": " - stem"
					},
					"pawsx": {
						"acc,none": 0.48414285714285715,
						"acc_stderr,none": 0.05252924848651583,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7501035909616529,
						"acc_norm,none": 0.6469770436578927,
						"acc_norm_stderr,none": 0.008283332722837067,
						"acc_stderr,none": 0.13449135173208024,
						"alias": "pythia",
						"bits_per_byte,none": 0.632139855498008,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5498620989564158,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2429975954916554,
						"perplexity_stderr,none": 0.06233698240179353,
						"word_perplexity,none": 10.41316462664955,
						"word_perplexity_stderr,none": "N/A"
					},
					"truthfulqa": {
						"acc,none": 0.31544186776937666,
						"acc_stderr,none": 0.001374185878540407,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30966952264381886,
						"bleu_acc_stderr,none": 0.01618574435514492,
						"bleu_diff,none": -7.859860900269691,
						"bleu_diff_stderr,none": 0.8222963466121774,
						"bleu_max,none": 26.987244075193836,
						"bleu_max_stderr,none": 0.7941428468729818,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -10.02060741980673,
						"rouge1_diff_stderr,none": 0.880297959586132,
						"rouge1_max,none": 52.758134839292545,
						"rouge1_max_stderr,none": 0.844118667207512,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -11.906284194394052,
						"rouge2_diff_stderr,none": 1.0746410323905284,
						"rouge2_max,none": 36.76129516350012,
						"rouge2_max_stderr,none": 0.9925060034973101,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.993084014148303,
						"rougeL_diff_stderr,none": 0.899770804733756,
						"rougeL_max,none": 49.81362453814381,
						"rougeL_max_stderr,none": 0.8572901458461103
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.06962250315450383,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4350736278447122,
						"acc_stderr,none": 0.05023326746116526,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6335960531857289,
						"acc_stderr,none": 0.062072404024590994,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.040039563220538706,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6527621195039459,
						"acc_norm,none": 0.6431792559188275,
						"acc_norm_stderr,none": 0.07938972032889308,
						"acc_stderr,none": 0.10613987157062768,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4625,
						"acc_stderr,none": 0.045266618521850016,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.01573017604600907,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.0157049879543618,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4058333333333333,
						"acc_stderr,none": 0.014181377176527047,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4283276450511945,
						"acc_norm,none": 0.4761092150170648,
						"acc_norm_stderr,none": 0.014594701798071654,
						"acc_stderr,none": 0.014460496367599026,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7634680134680135,
						"acc_norm,none": 0.7255892255892256,
						"acc_norm_stderr,none": 0.00915617712224452,
						"acc_stderr,none": 0.008719840797175745,
						"alias": "  - arc_easy"
					},
					"blimp": {
						"acc,none": 0.8227761194029852,
						"acc_stderr,none": 0.13606311499425142,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406728,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264604,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024971,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968776,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.015625625112620667,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037191,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264362,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844882,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319419,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.0073351758537068355,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033844,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666679,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796396,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.00863212103213999,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275291,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707368,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967232,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967222,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832011,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890162,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968123,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809963,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746849,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.015050266127564448,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181331,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745908,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.01331455133593595,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932575,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286419,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.01572330188676094,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.637,
						"acc_stderr,none": 0.015213890444671287,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296181,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.00774364022691929,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.015110404505648658,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756993,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904633,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661763,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406729,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866447,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.653,
						"acc_stderr,none": 0.015060472031706624,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.643,
						"acc_stderr,none": 0.015158521721486774,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151125,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341676,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656807,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809961,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.015746235865880677,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792948,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.645,
						"acc_stderr,none": 0.015139491543780532,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.678,
						"acc_stderr,none": 0.014782913600996676,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496497,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747384,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229852,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160328,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706826,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074796,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.0050348137353182255,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.00589395781616554,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.01564032031704011,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.357,
						"acc_stderr,none": 0.015158521721486767,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"cmmlu": {
						"acc,none": 0.3087549646002418,
						"acc_norm,none": 0.3087549646002418,
						"acc_norm_stderr,none": 0.05788408541131644,
						"acc_stderr,none": 0.05788408541131644,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.30177514792899407,
						"acc_norm,none": 0.30177514792899407,
						"acc_norm_stderr,none": 0.03541479614288121,
						"acc_stderr,none": 0.03541479614288121,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.03904067786683382,
						"acc_stderr,none": 0.03904067786683382,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.033354517532061055,
						"acc_stderr,none": 0.033354517532061055,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.3281733746130031,
						"acc_norm,none": 0.3281733746130031,
						"acc_norm_stderr,none": 0.02616690401755083,
						"acc_stderr,none": 0.02616690401755083,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647555,
						"acc_stderr,none": 0.03182231867647555,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3575418994413408,
						"acc_norm,none": 0.3575418994413408,
						"acc_norm_stderr,none": 0.03592327103931582,
						"acc_stderr,none": 0.03592327103931582,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.02845882099146029,
						"acc_stderr,none": 0.02845882099146029,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999062,
						"acc_stderr,none": 0.04350546818999062,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.04779251692801369,
						"acc_stderr,none": 0.04779251692801369,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.36792452830188677,
						"acc_norm,none": 0.36792452830188677,
						"acc_norm_stderr,none": 0.047061871107614554,
						"acc_stderr,none": 0.047061871107614554,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604902,
						"acc_stderr,none": 0.04176466758604902,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.28205128205128205,
						"acc_norm,none": 0.28205128205128205,
						"acc_norm_stderr,none": 0.02728514708163732,
						"acc_stderr,none": 0.02728514708163732,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.03354092437591519,
						"acc_stderr,none": 0.03354092437591519,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2925170068027211,
						"acc_norm,none": 0.2925170068027211,
						"acc_norm_stderr,none": 0.03764931984085173,
						"acc_stderr,none": 0.03764931984085173,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.0379400712153362,
						"acc_stderr,none": 0.0379400712153362,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.34591194968553457,
						"acc_norm,none": 0.34591194968553457,
						"acc_norm_stderr,none": 0.0378418488414083,
						"acc_stderr,none": 0.0378418488414083,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3312883435582822,
						"acc_norm,none": 0.3312883435582822,
						"acc_norm_stderr,none": 0.03697983910025588,
						"acc_stderr,none": 0.03697983910025588,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.034724693044775976,
						"acc_stderr,none": 0.034724693044775976,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2976190476190476,
						"acc_norm,none": 0.2976190476190476,
						"acc_norm_stderr,none": 0.028858905984721215,
						"acc_stderr,none": 0.028858905984721215,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29797979797979796,
						"acc_norm,none": 0.29797979797979796,
						"acc_norm_stderr,none": 0.03258630383836555,
						"acc_stderr,none": 0.03258630383836555,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.47058823529411764,
						"acc_norm,none": 0.47058823529411764,
						"acc_norm_stderr,none": 0.03242225027115007,
						"acc_stderr,none": 0.03242225027115007,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.02975452853823324,
						"acc_stderr,none": 0.02975452853823324,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.03972552884785138,
						"acc_stderr,none": 0.03972552884785138,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32167832167832167,
						"acc_norm,none": 0.32167832167832167,
						"acc_norm_stderr,none": 0.03919986517659165,
						"acc_stderr,none": 0.03919986517659165,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2840909090909091,
						"acc_norm,none": 0.2840909090909091,
						"acc_norm_stderr,none": 0.034090909090909075,
						"acc_stderr,none": 0.034090909090909075,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2796610169491525,
						"acc_norm,none": 0.2796610169491525,
						"acc_norm_stderr,none": 0.04149459161011112,
						"acc_stderr,none": 0.04149459161011112,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.04172343038705383,
						"acc_stderr,none": 0.04172343038705383,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.34265734265734266,
						"acc_norm,none": 0.34265734265734266,
						"acc_norm_stderr,none": 0.03982738177809643,
						"acc_stderr,none": 0.03982738177809643,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.30952380952380953,
						"acc_norm,none": 0.30952380952380953,
						"acc_norm_stderr,none": 0.04134913018303316,
						"acc_stderr,none": 0.04134913018303316,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871638,
						"acc_stderr,none": 0.03615263198871638,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2749391727493917,
						"acc_norm,none": 0.2749391727493917,
						"acc_norm_stderr,none": 0.022050254355995075,
						"acc_stderr,none": 0.022050254355995075,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.4392523364485981,
						"acc_norm,none": 0.4392523364485981,
						"acc_norm_stderr,none": 0.03400564171454575,
						"acc_stderr,none": 0.03400564171454575,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3170731707317073,
						"acc_norm,none": 0.3170731707317073,
						"acc_norm_stderr,none": 0.04212955964853051,
						"acc_stderr,none": 0.04212955964853051,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.04146178164901212,
						"acc_stderr,none": 0.04146178164901212,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3523809523809524,
						"acc_norm,none": 0.3523809523809524,
						"acc_norm_stderr,none": 0.03304401999334815,
						"acc_stderr,none": 0.03304401999334815,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.31666666666666665,
						"acc_norm,none": 0.31666666666666665,
						"acc_norm_stderr,none": 0.034768900963930385,
						"acc_stderr,none": 0.034768900963930385,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3492063492063492,
						"acc_norm,none": 0.3492063492063492,
						"acc_norm_stderr,none": 0.034768327088204216,
						"acc_stderr,none": 0.034768327088204216,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.04280254792505459,
						"acc_stderr,none": 0.04280254792505459,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.30344827586206896,
						"acc_norm,none": 0.30344827586206896,
						"acc_norm_stderr,none": 0.038312260488503336,
						"acc_stderr,none": 0.038312260488503336,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3047619047619048,
						"acc_norm,none": 0.3047619047619048,
						"acc_norm_stderr,none": 0.0451367671816831,
						"acc_stderr,none": 0.0451367671816831,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290195,
						"acc_stderr,none": 0.034449526562290195,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.03046967065084667,
						"acc_stderr,none": 0.03046967065084667,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2632978723404255,
						"acc_norm,none": 0.2632978723404255,
						"acc_norm_stderr,none": 0.022743327388426438,
						"acc_stderr,none": 0.022743327388426438,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.03162106740099062,
						"acc_stderr,none": 0.03162106740099062,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.03613730415279119,
						"acc_stderr,none": 0.03613730415279119,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.0402477840197711,
						"acc_stderr,none": 0.0402477840197711,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.03646204963253812,
						"acc_stderr,none": 0.03646204963253812,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3375,
						"acc_norm,none": 0.3375,
						"acc_norm_stderr,none": 0.03749999999999997,
						"acc_stderr,none": 0.03749999999999997,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.10240027657242429,
						"mcc_stderr,none": 0.03306591562071735
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"glue": {
						"acc,none": 0.6985469271081467,
						"acc_stderr,none": 0.0031934574274837552,
						"alias": "glue",
						"f1,none": 0.5101845701962574,
						"f1_stderr,none": 0.0009352752422202518,
						"mcc,none": 0.10240027657242429,
						"mcc_stderr,none": 0.03306591562071735
					},
					"hellaswag": {
						"acc,none": 0.5465046803425613,
						"acc_norm,none": 0.7346146186018722,
						"acc_norm_stderr,none": 0.004406358190678485,
						"acc_stderr,none": 0.004968151878211048,
						"alias": "hellaswag"
					},
					"lambada": {
						"acc,none": 0.7200659809819523,
						"acc_stderr,none": 0.016418239205527346,
						"alias": "lambada",
						"perplexity,none": 3.5330865404075995,
						"perplexity_stderr,none": 0.16067638520987035
					},
					"lambada_multilingual": {
						"acc,none": 0.5328158354356686,
						"acc_stderr,none": 0.0878378202197205,
						"alias": "lambada_multilingual",
						"perplexity,none": 22.286600594329414,
						"perplexity_stderr,none": 8.481649253464367
					},
					"lambada_openai": {
						"acc,none": 0.7500485154279061,
						"acc_stderr,none": 0.0060323233232559845,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2429975954916554,
						"perplexity_stderr,none": 0.06233698240179353
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4308169998059383,
						"acc_stderr,none": 0.006898973060283536,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.6569247618461,
						"perplexity_stderr,none": 1.9536893868465088
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7504366388511546,
						"acc_stderr,none": 0.006029197365300718,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.242929909511006,
						"perplexity_stderr,none": 0.06233900445133314
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4403260236755288,
						"acc_stderr,none": 0.006916188259769203,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 30.809159649809324,
						"perplexity_stderr,none": 1.548473964878433
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5495827673200078,
						"acc_stderr,none": 0.006931642009240898,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.68556834877217,
						"perplexity_stderr,none": 0.8216054796070549
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4929167475257132,
						"acc_stderr,none": 0.006965278621568839,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 26.03842030170847,
						"perplexity_stderr,none": 1.4695514175038529
					},
					"lambada_standard": {
						"acc,none": 0.6896953231127498,
						"acc_stderr,none": 0.006445177376219963,
						"alias": " - lambada_standard",
						"perplexity,none": 3.8226753085433427,
						"perplexity_stderr,none": 0.07623225457469221
					},
					"logiqa": {
						"acc,none": 0.2488479262672811,
						"acc_norm,none": 0.29339477726574503,
						"acc_norm_stderr,none": 0.017859032704399497,
						"acc_stderr,none": 0.016957985904525585,
						"alias": " - logiqa"
					},
					"mmlu": {
						"acc,none": 0.4392536675687224,
						"acc_stderr,none": 0.09622791212394333,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4740740740740741,
						"acc_stderr,none": 0.04313531696750574,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4830188679245283,
						"acc_stderr,none": 0.030755120364119898,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4722222222222222,
						"acc_stderr,none": 0.04174752578923183,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.03714325906302065,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.046550104113196177,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.05024183937956913,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.42127659574468085,
						"acc_stderr,none": 0.03227834510146267,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.04434600701584925,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.46206896551724136,
						"acc_stderr,none": 0.04154659671707546,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29894179894179895,
						"acc_stderr,none": 0.0235776047916558,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.042163702135578345,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.5129032258064516,
						"acc_stderr,none": 0.028434533152681855,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3497536945812808,
						"acc_stderr,none": 0.033554009049695646,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.04999999999999999,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5818181818181818,
						"acc_stderr,none": 0.03851716319398394,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.51010101010101,
						"acc_stderr,none": 0.035616254886737454,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5699481865284974,
						"acc_stderr,none": 0.035729543331448066,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.36923076923076925,
						"acc_stderr,none": 0.02446861524147892,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.28888888888888886,
						"acc_stderr,none": 0.027634907264178544,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.36134453781512604,
						"acc_stderr,none": 0.031204691225150016,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.034454062719870546,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.6073394495412844,
						"acc_stderr,none": 0.020937505161201093,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.028963702570791026,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5098039215686274,
						"acc_stderr,none": 0.035086373586305716,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5569620253164557,
						"acc_stderr,none": 0.032335327775334835,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4618834080717489,
						"acc_stderr,none": 0.03346015011973228,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5648854961832062,
						"acc_stderr,none": 0.04348208051644858,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.40807651434643993,
						"acc_stderr,none": 0.09425961131920245,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.47107438016528924,
						"acc_stderr,none": 0.04556710331269498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.0483036602463533,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.44785276073619634,
						"acc_stderr,none": 0.03906947479456601,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.04287858751340457,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.5339805825242718,
						"acc_stderr,none": 0.0493929144727348,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6623931623931624,
						"acc_stderr,none": 0.030980296992618558,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.05016135580465919,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6206896551724138,
						"acc_stderr,none": 0.017351268117544453,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.44508670520231214,
						"acc_stderr,none": 0.026756255129663772,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23016759776536314,
						"acc_stderr,none": 0.014078339253425812,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.48366013071895425,
						"acc_stderr,none": 0.028614624752805413,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.5011264885741873,
						"acc_stderr,none": 0.08199687599600157,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5369774919614148,
						"acc_stderr,none": 0.02832032583010591,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5370370370370371,
						"acc_stderr,none": 0.027744313443376536,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3546099290780142,
						"acc_stderr,none": 0.028538650028878645,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3533246414602347,
						"acc_stderr,none": 0.01220840821108243,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.48161764705882354,
						"acc_stderr,none": 0.03035230339535196,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4526143790849673,
						"acc_stderr,none": 0.020136790918492537,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4818181818181818,
						"acc_stderr,none": 0.04785964010794916,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.42448979591836733,
						"acc_stderr,none": 0.031642094879429414,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.49528761780955477,
						"acc_stderr,none": 0.08736389012968614,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6915422885572139,
						"acc_stderr,none": 0.032658195885126966,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.37012369172216936,
						"acc_stderr,none": 0.08563568173481621,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.04725815626252609,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.39156626506024095,
						"acc_stderr,none": 0.03799857454479636,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6842105263157895,
						"acc_stderr,none": 0.03565079670708311,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.7427407030056037,
						"acc_stderr,none": 0.004412463486904445,
						"alias": " - mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.7462367778681855,
						"acc_stderr,none": 0.004388881111484902,
						"alias": " - mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": " - mrpc",
						"f1,none": 0.8386075949367089,
						"f1_stderr,none": 0.01576652065498808
					},
					"openbookqa": {
						"acc,none": 0.318,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.022109039310618556,
						"acc_stderr,none": 0.02084757162081401,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.435,
						"acc_stderr,none": 0.011088235860011597,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3725,
						"acc_stderr,none": 0.010813433320184786,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4435,
						"acc_stderr,none": 0.011111507899646485,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.011150792352341657,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.011172305500884872,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5305,
						"acc_stderr,none": 0.011162310405413182,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48414285714285715,
						"acc_stderr,none": 0.05252924848651583,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.780739934711643,
						"acc_norm,none": 0.7959738846572362,
						"acc_norm_stderr,none": 0.009402378102942638,
						"acc_stderr,none": 0.009653357463605326,
						"alias": " - piqa"
					},
					"pythia": {
						"acc,none": 0.7501035909616529,
						"acc_norm,none": 0.6469770436578927,
						"acc_norm_stderr,none": 0.008283332722837067,
						"acc_stderr,none": 0.13449135173208024,
						"alias": "pythia",
						"bits_per_byte,none": 0.632139855498008,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5498620989564158,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2429975954916554,
						"perplexity_stderr,none": 0.06233698240179353,
						"word_perplexity,none": 10.41316462664955,
						"word_perplexity_stderr,none": "N/A"
					},
					"qnli": {
						"acc,none": 0.49478308621636463,
						"acc_stderr,none": 0.006765042284363289,
						"alias": " - qnli"
					},
					"qqp": {
						"acc,none": 0.6995300519416275,
						"acc_stderr,none": 0.002280117404297572,
						"alias": " - qqp",
						"f1,none": 0.5073404169032363,
						"f1_stderr,none": 0.0038912840432811747
					},
					"record": {
						"alias": "record",
						"em,none": 0.2749,
						"em_stderr,none": 0.0044648619798660655,
						"f1,none": 0.28507523835003373,
						"f1_stderr,none": 0.004474456684105578
					},
					"rte": {
						"acc,none": 0.6570397111913358,
						"acc_stderr,none": 0.02857348326765378,
						"alias": " - rte"
					},
					"sciq": {
						"acc,none": 0.951,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323488,
						"acc_stderr,none": 0.006829761756140926,
						"alias": " - sciq"
					},
					"sst2": {
						"acc,none": 0.9036697247706422,
						"acc_stderr,none": 0.009997172579825117,
						"alias": " - sst2"
					},
					"truthfulqa": {
						"acc,none": 0.31544186776937666,
						"acc_stderr,none": 0.001374185878540407,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30966952264381886,
						"bleu_acc_stderr,none": 0.01618574435514492,
						"bleu_diff,none": -7.859860900269691,
						"bleu_diff_stderr,none": 0.8222963466121774,
						"bleu_max,none": 26.987244075193836,
						"bleu_max_stderr,none": 0.7941428468729818,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -10.02060741980673,
						"rouge1_diff_stderr,none": 0.880297959586132,
						"rouge1_max,none": 52.758134839292545,
						"rouge1_max_stderr,none": 0.844118667207512,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -11.906284194394052,
						"rouge2_diff_stderr,none": 1.0746410323905284,
						"rouge2_max,none": 36.76129516350012,
						"rouge2_max_stderr,none": 0.9925060034973101,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.993084014148303,
						"rougeL_diff_stderr,none": 0.899770804733756,
						"rougeL_max,none": 49.81362453814381,
						"rougeL_max_stderr,none": 0.8572901458461103
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30966952264381886,
						"bleu_acc_stderr,none": 0.01618574435514492,
						"bleu_diff,none": -7.859860900269691,
						"bleu_diff_stderr,none": 0.8222963466121774,
						"bleu_max,none": 26.987244075193836,
						"bleu_max_stderr,none": 0.7941428468729818,
						"rouge1_acc,none": 0.2839657282741738,
						"rouge1_acc_stderr,none": 0.015785370858396736,
						"rouge1_diff,none": -10.02060741980673,
						"rouge1_diff_stderr,none": 0.880297959586132,
						"rouge1_max,none": 52.758134839292545,
						"rouge1_max_stderr,none": 0.844118667207512,
						"rouge2_acc,none": 0.2594859241126071,
						"rouge2_acc_stderr,none": 0.015345409485557985,
						"rouge2_diff,none": -11.906284194394052,
						"rouge2_diff_stderr,none": 1.0746410323905284,
						"rouge2_max,none": 36.76129516350012,
						"rouge2_max_stderr,none": 0.9925060034973101,
						"rougeL_acc,none": 0.2937576499388005,
						"rougeL_acc_stderr,none": 0.015945068581236614,
						"rougeL_diff,none": -9.993084014148303,
						"rougeL_diff_stderr,none": 0.899770804733756,
						"rougeL_max,none": 49.81362453814381,
						"rougeL_max_stderr,none": 0.8572901458461103
					},
					"truthfulqa_mc1": {
						"acc,none": 0.24724602203182375,
						"acc_stderr,none": 0.015102404797359652,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.38363771350692955,
						"acc_stderr,none": 0.013920733188145884,
						"alias": " - truthfulqa_mc2"
					},
					"wikitext": {
						"alias": " - wikitext",
						"bits_per_byte,none": 0.632139855498008,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5498620989564158,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.41316462664955,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7071823204419889,
						"acc_stderr,none": 0.012789321118542604,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": " - wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.048679937479186836,
						"alias": " - wsc"
					},
					"xcopa": {
						"acc,none": 0.6232727272727273,
						"acc_stderr,none": 0.06962250315450383,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.021683827539286122,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.020313179231745186,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566072,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962132,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.022149790663861923,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.582,
						"acc_stderr,none": 0.022080014812228137,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.02158898256835354,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.019966103540279466,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628036,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4350736278447122,
						"acc_stderr,none": 0.05023326746116526,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667057,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.46947791164658637,
						"acc_stderr,none": 0.010003382355314755,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4827309236947791,
						"acc_stderr,none": 0.010016093498409704,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.38313253012048193,
						"acc_stderr,none": 0.009744464994287529,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5405622489959839,
						"acc_stderr,none": 0.009989039874786892,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.010003871419517727,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.010021956483068088,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.42208835341365464,
						"acc_stderr,none": 0.009899652714895422,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4859437751004016,
						"acc_stderr,none": 0.010018111813088546,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.40883534136546185,
						"acc_stderr,none": 0.00985407806781077,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41325301204819276,
						"acc_stderr,none": 0.00987008743562378,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.4646586345381526,
						"acc_stderr,none": 0.009997006138567233,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.39518072289156625,
						"acc_stderr,none": 0.009799371892746732,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40963855421686746,
						"acc_stderr,none": 0.009857049962123568,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138149,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6335960531857289,
						"acc_stderr,none": 0.062072404024590994,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5923229649238915,
						"acc_stderr,none": 0.012645876488040306,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7802779616148247,
						"acc_stderr,none": 0.010655479709353636,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7240238252812706,
						"acc_stderr,none": 0.01150333454985087,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5731303772336201,
						"acc_stderr,none": 0.012728753181936874,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.012595197856703514,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6690933156849769,
						"acc_stderr,none": 0.012108982233131475,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828153,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6882859033752482,
						"acc_stderr,none": 0.01191994318039934,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5545996029119789,
						"acc_stderr,none": 0.012790178438084814,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823772,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6525479814692257,
						"acc_stderr,none": 0.012253641527935297,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8096201393571589,
						"acc_stderr,none": 0.040039563220538706,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8744086021505376,
						"acc_stderr,none": 0.006874151446168045,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.051219942106581456,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7288842544316997,
						"acc_stderr,none": 0.014362296895048159,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7832699619771863,
						"acc_stderr,none": 0.025454504291142595,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6730158730158731,
						"acc_stderr,none": 0.026473487980890983,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7837301587301587,
						"acc_stderr,none": 0.01835681232408577,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "m8than/FinchX-Med"
	},
	"m8than/mistral-7b-instruct-0.2": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.7243517474633596,
						"acc_norm,none": 0.6987034949267192,
						"acc_norm_stderr,none": 0.06588869288543764,
						"acc_stderr,none": 0.08532691337457736,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4709375,
						"acc_stderr,none": 0.01616629545066717,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.86475,
						"acc_stderr,none": 0.1236037349507068,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.7576567164179104,
						"acc_stderr,none": 0.1733436277133109,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.43536404160475484,
						"acc_norm,none": 0.43536404160475484,
						"acc_norm_stderr,none": 0.14370605584199775,
						"acc_stderr,none": 0.14370605584199775,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.42177516836470386,
						"acc_norm,none": 0.42177516836470386,
						"acc_norm_stderr,none": 0.10457920027324105,
						"acc_stderr,none": 0.10457920027324105,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.701954366766988,
						"likelihood_diff_stderr,none": 0.4909984754945655,
						"pct_stereotype,none": 0.5852713178294573,
						"pct_stereotype_stderr,none": 0.08237726662659446
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.060039370078740155,
						"exact_match_stderr,none": 0.005271302429704627
					},
					"glue": {
						"acc,none": 0.5821075512148642,
						"acc_stderr,none": 0.00042180222474276545,
						"alias": "glue",
						"f1,none": 0.4411312721184814,
						"f1_stderr,none": 0.0013090581534606635,
						"mcc,none": 0.13447152115893618,
						"mcc_stderr,none": 0.03255872877924799
					},
					"kmmlu": {
						"acc,none": 0.34805082298585044,
						"acc_norm,none": 0.34805082298585044,
						"acc_norm_stderr,none": 0.0720026679382373,
						"acc_stderr,none": 0.0720026679382373,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.6044727033545275,
						"acc_norm,none": 0.49,
						"acc_norm_stderr,none": 0.0005008016032064087,
						"acc_stderr,none": 0.10425554434639311,
						"alias": "kobest",
						"f1,none": 0.5572606858700483,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6836794100523966,
						"acc_stderr,none": 0.01672814469839814,
						"alias": "lambada",
						"perplexity,none": 3.784860922534949,
						"perplexity_stderr,none": 0.21243710178079137
					},
					"lambada_cloze": {
						"acc,none": 0.2098777411216767,
						"acc_stderr,none": 0.04781818869704983,
						"alias": "lambada_cloze",
						"perplexity,none": 79.66147457124333,
						"perplexity_stderr,none": 21.394086996235064
					},
					"mmlu": {
						"acc,none": 0.5893747329440251,
						"acc_stderr,none": 0.12615955327862066,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5375132837407015,
						"acc_stderr,none": 0.14096864671190043,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.664628258770518,
						"acc_stderr,none": 0.09811125295244115,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6873578160545987,
						"acc_stderr,none": 0.08390077225869963,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.4969869965112591,
						"acc_stderr,none": 0.1084738786137943,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.5178140525195174,
						"acc_norm,none": 0.4779975719488681,
						"acc_norm_stderr,none": 0.00015397501597299025,
						"acc_stderr,none": 0.06923035174785634,
						"alias": "stem"
					},
					"pythia": {
						"acc,none": 0.7283806813415201,
						"acc_norm,none": 0.7003817448395876,
						"acc_norm_stderr,none": 0.005970464335515298,
						"acc_stderr,none": 0.16111508221444962,
						"alias": "pythia",
						"bits_per_byte,none": 0.615588939628387,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5321833401531846,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3987038759067074,
						"perplexity_stderr,none": 0.07238587778792913,
						"word_perplexity,none": 9.79354701944479,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.5478723404255319,
						"acc_norm,none": 0.5797872340425532,
						"acc_norm_stderr,none": 0.0785497170880167,
						"acc_stderr,none": 0.05995870420406338,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.8909187714219161,
						"acc_stderr,none": 0.06492188924400273,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.5969340297618705,
						"acc_stderr,none": 0.0015597689440213628,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.5446756425948592,
						"bleu_acc_stderr,none": 0.017433490102538775,
						"bleu_diff,none": 4.366315081410718,
						"bleu_diff_stderr,none": 0.6138335139825885,
						"bleu_max,none": 21.27473498730541,
						"bleu_max_stderr,none": 0.7533871133042884,
						"rouge1_acc,none": 0.5716034271725826,
						"rouge1_acc_stderr,none": 0.01732308859731477,
						"rouge1_diff,none": 5.717141224803091,
						"rouge1_diff_stderr,none": 0.870296684878197,
						"rouge1_max,none": 46.258060058643956,
						"rouge1_max_stderr,none": 0.8552045039813724,
						"rouge2_acc,none": 0.44920440636474906,
						"rouge2_acc_stderr,none": 0.017412941986115277,
						"rouge2_diff,none": 5.424527805363653,
						"rouge2_diff_stderr,none": 0.9322851416532594,
						"rouge2_max,none": 31.09098851770095,
						"rouge2_max_stderr,none": 0.9802688849674306,
						"rougeL_acc,none": 0.5532435740514076,
						"rougeL_acc_stderr,none": 0.017403977522557144,
						"rougeL_diff,none": 5.486366464760439,
						"rougeL_diff_stderr,none": 0.8794580025029041,
						"rougeL_max,none": 43.2095248401885,
						"rougeL_max_stderr,none": 0.871503759712897
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.7243517474633596,
						"acc_norm,none": 0.6987034949267192,
						"acc_norm_stderr,none": 0.06588869288543764,
						"acc_stderr,none": 0.08532691337457736,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.4709375,
						"acc_stderr,none": 0.01616629545066717,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262416,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.4683333333333333,
						"acc_stderr,none": 0.014410785687762972,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.5443686006825939,
						"acc_norm,none": 0.560580204778157,
						"acc_norm_stderr,none": 0.014503747823580127,
						"acc_stderr,none": 0.014553749939306868,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.8131313131313131,
						"acc_norm,none": 0.7668350168350169,
						"acc_norm_stderr,none": 0.008676624951179686,
						"acc_stderr,none": 0.007998652011081705,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.86475,
						"acc_stderr,none": 0.1236037349507068,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.6415,
						"acc_stderr,none": 0.010725968403790007,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.9875,
						"acc_stderr,none": 0.0024849471787626713,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.5645,
						"acc_stderr,none": 0.011089696374691111,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0017272787111155114,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.9675,
						"acc_stderr,none": 0.003966073608738835,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.004329997048176552,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.9225,
						"acc_stderr,none": 0.005980364318224214,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.9165,
						"acc_stderr,none": 0.006187327089160045,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.8715,
						"acc_stderr,none": 0.0074847769467749,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.008574162240654349,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.7576567164179104,
						"acc_stderr,none": 0.1733436277133109,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426112,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946568,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617331,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823335,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895036,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.01581727492920901,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.657,
						"acc_stderr,none": 0.015019206922356951,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.014013292702729479,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697601,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.01360735683959812,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345693,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01391220865102135,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281562,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096921,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.013996674851796296,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122363,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.0115101469792302,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462615,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661773,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785145,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177904,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307799,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896890894,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.334,
						"acc_stderr,none": 0.014922019523732967,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598288,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.0151201726054837,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.01288666233227455,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698474,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.00923305200078774,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.01009340759490464,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745895,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.01577824302490459,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416053,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.015797897758042773,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.015816135752773207,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.0052195060344100395,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271306,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408046,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855747,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296191,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.0093636893732481,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329854,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145148,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.533,
						"acc_stderr,none": 0.015784807891138775,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.631,
						"acc_stderr,none": 0.015266698139154615,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370145,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525023,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.01453968371053526,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.452,
						"acc_stderr,none": 0.015746235865880677,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298276,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783255,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.581,
						"acc_stderr,none": 0.015610338967577797,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477823,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968126,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.014142984975740668,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.263,
						"acc_stderr,none": 0.013929286594259729,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.304,
						"acc_stderr,none": 0.014553205687950436,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745892,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.261,
						"acc_stderr,none": 0.013895037677965133,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.354,
						"acc_stderr,none": 0.015129868238451772,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.8541284403669724,
						"acc_stderr,none": 0.006173613606532019,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.058387420812114225,
						"alias": "cb",
						"f1,none": 0.6059523809523809,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.43536404160475484,
						"acc_norm,none": 0.43536404160475484,
						"acc_norm_stderr,none": 0.14370605584199775,
						"acc_stderr,none": 0.14370605584199775,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.3673469387755102,
						"acc_norm,none": 0.3673469387755102,
						"acc_norm_stderr,none": 0.06958255967849926,
						"acc_stderr,none": 0.06958255967849926,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3939393939393939,
						"acc_norm,none": 0.3939393939393939,
						"acc_norm_stderr,none": 0.08637692614387409,
						"acc_stderr,none": 0.08637692614387409,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3404255319148936,
						"acc_norm,none": 0.3404255319148936,
						"acc_norm_stderr,none": 0.06986570800554746,
						"acc_stderr,none": 0.06986570800554746,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.41818181818181815,
						"acc_norm,none": 0.41818181818181815,
						"acc_norm_stderr,none": 0.0671242332357016,
						"acc_stderr,none": 0.0671242332357016,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.5675675675675675,
						"acc_norm,none": 0.5675675675675675,
						"acc_norm_stderr,none": 0.08256893144064577,
						"acc_stderr,none": 0.08256893144064577,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.11180339887498948,
						"acc_stderr,none": 0.11180339887498948,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.4594594594594595,
						"acc_norm,none": 0.4594594594594595,
						"acc_norm_stderr,none": 0.08305895907471071,
						"acc_stderr,none": 0.08305895907471071,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549507,
						"acc_stderr,none": 0.09085862440549507,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767889,
						"acc_stderr,none": 0.08892934678767889,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.65,
						"acc_norm,none": 0.65,
						"acc_norm_stderr,none": 0.10942433098048308,
						"acc_stderr,none": 0.10942433098048308,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033673,
						"acc_stderr,none": 0.10083169033033673,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522563,
						"acc_stderr,none": 0.11369720523522563,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.6842105263157895,
						"acc_norm,none": 0.6842105263157895,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659811,
						"acc_stderr,none": 0.09829463743659811,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.6956521739130435,
						"acc_norm,none": 0.6956521739130435,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.15075567228888181,
						"acc_stderr,none": 0.15075567228888181,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3877551020408163,
						"acc_norm,none": 0.3877551020408163,
						"acc_norm_stderr,none": 0.07032677934739909,
						"acc_stderr,none": 0.07032677934739909,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.07389883353033022,
						"acc_stderr,none": 0.07389883353033022,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.42177516836470386,
						"acc_norm,none": 0.42177516836470386,
						"acc_norm_stderr,none": 0.10457920027324105,
						"acc_stderr,none": 0.10457920027324105,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3905325443786982,
						"acc_norm,none": 0.3905325443786982,
						"acc_norm_stderr,none": 0.03763996705629264,
						"acc_stderr,none": 0.03763996705629264,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.38125,
						"acc_norm_stderr,none": 0.03851802138867096,
						"acc_stderr,none": 0.03851802138867096,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3696969696969697,
						"acc_norm,none": 0.3696969696969697,
						"acc_norm_stderr,none": 0.037694303145125674,
						"acc_stderr,none": 0.037694303145125674,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.49282296650717705,
						"acc_norm,none": 0.49282296650717705,
						"acc_norm_stderr,none": 0.03466519051738992,
						"acc_stderr,none": 0.03466519051738992,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812042,
						"acc_stderr,none": 0.03782614981812042,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.6259541984732825,
						"acc_norm,none": 0.6259541984732825,
						"acc_norm_stderr,none": 0.042438692422305246,
						"acc_stderr,none": 0.042438692422305246,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3235294117647059,
						"acc_norm,none": 0.3235294117647059,
						"acc_norm_stderr,none": 0.04026377210787309,
						"acc_stderr,none": 0.04026377210787309,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.04808472349429953,
						"acc_stderr,none": 0.04808472349429953,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.46439628482972134,
						"acc_norm,none": 0.46439628482972134,
						"acc_norm_stderr,none": 0.02779317890009884,
						"acc_stderr,none": 0.02779317890009884,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3627450980392157,
						"acc_norm,none": 0.3627450980392157,
						"acc_norm_stderr,none": 0.03374499356319355,
						"acc_stderr,none": 0.03374499356319355,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5139664804469274,
						"acc_norm,none": 0.5139664804469274,
						"acc_norm_stderr,none": 0.03746196103854561,
						"acc_stderr,none": 0.03746196103854561,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0306858205966108,
						"acc_stderr,none": 0.0306858205966108,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199628,
						"acc_stderr,none": 0.04439263906199628,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4485981308411215,
						"acc_norm,none": 0.4485981308411215,
						"acc_norm_stderr,none": 0.04830698295619321,
						"acc_stderr,none": 0.04830698295619321,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4716981132075472,
						"acc_norm,none": 0.4716981132075472,
						"acc_norm_stderr,none": 0.04871677165040776,
						"acc_stderr,none": 0.04871677165040776,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.28703703703703703,
						"acc_norm,none": 0.28703703703703703,
						"acc_norm_stderr,none": 0.043733130409147614,
						"acc_stderr,none": 0.043733130409147614,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.44339622641509435,
						"acc_norm,none": 0.44339622641509435,
						"acc_norm_stderr,none": 0.0484813182297548,
						"acc_stderr,none": 0.0484813182297548,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.3772893772893773,
						"acc_norm,none": 0.3772893772893773,
						"acc_norm_stderr,none": 0.029389755560221963,
						"acc_stderr,none": 0.029389755560221963,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5147058823529411,
						"acc_norm,none": 0.5147058823529411,
						"acc_norm_stderr,none": 0.03507793834791325,
						"acc_stderr,none": 0.03507793834791325,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.5964912280701754,
						"acc_norm,none": 0.5964912280701754,
						"acc_norm_stderr,none": 0.03762738699917057,
						"acc_stderr,none": 0.03762738699917057,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.4217687074829932,
						"acc_norm,none": 0.4217687074829932,
						"acc_norm_stderr,none": 0.0408706500237496,
						"acc_stderr,none": 0.0408706500237496,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.39568345323741005,
						"acc_norm,none": 0.39568345323741005,
						"acc_norm_stderr,none": 0.04162618828625745,
						"acc_stderr,none": 0.04162618828625745,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.5031446540880503,
						"acc_norm,none": 0.5031446540880503,
						"acc_norm_stderr,none": 0.03977707748639468,
						"acc_stderr,none": 0.03977707748639468,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.4723926380368098,
						"acc_norm,none": 0.4723926380368098,
						"acc_norm_stderr,none": 0.039223782906109894,
						"acc_stderr,none": 0.039223782906109894,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.4186046511627907,
						"acc_norm,none": 0.4186046511627907,
						"acc_norm_stderr,none": 0.03772591189087505,
						"acc_stderr,none": 0.03772591189087505,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.027751792418790926,
						"acc_stderr,none": 0.027751792418790926,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.40404040404040403,
						"acc_norm,none": 0.40404040404040403,
						"acc_norm_stderr,none": 0.03496130972056126,
						"acc_stderr,none": 0.03496130972056126,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6722689075630253,
						"acc_norm,none": 0.6722689075630253,
						"acc_norm_stderr,none": 0.03048991141767323,
						"acc_stderr,none": 0.03048991141767323,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.34814814814814815,
						"acc_norm,none": 0.34814814814814815,
						"acc_norm_stderr,none": 0.041153246103369526,
						"acc_stderr,none": 0.041153246103369526,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4125874125874126,
						"acc_norm,none": 0.4125874125874126,
						"acc_norm_stderr,none": 0.04131287692392343,
						"acc_stderr,none": 0.04131287692392343,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.036363636363636376,
						"acc_stderr,none": 0.036363636363636376,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.42953020134228187,
						"acc_norm,none": 0.42953020134228187,
						"acc_norm_stderr,none": 0.04068949724015223,
						"acc_stderr,none": 0.04068949724015223,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.40236686390532544,
						"acc_norm,none": 0.40236686390532544,
						"acc_norm_stderr,none": 0.03783326285416536,
						"acc_stderr,none": 0.03783326285416536,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.04118680421434766,
						"acc_stderr,none": 0.04118680421434766,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.4152542372881356,
						"acc_norm,none": 0.4152542372881356,
						"acc_norm_stderr,none": 0.045556216394221444,
						"acc_stderr,none": 0.045556216394221444,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.047093069786618966,
						"acc_stderr,none": 0.047093069786618966,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.4125874125874126,
						"acc_norm,none": 0.4125874125874126,
						"acc_norm_stderr,none": 0.04131287692392343,
						"acc_stderr,none": 0.04131287692392343,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.5079365079365079,
						"acc_norm,none": 0.5079365079365079,
						"acc_norm_stderr,none": 0.044715725362943486,
						"acc_stderr,none": 0.044715725362943486,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.03611575592573071,
						"acc_stderr,none": 0.03611575592573071,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5116279069767442,
						"acc_norm,none": 0.5116279069767442,
						"acc_norm_stderr,none": 0.03822561461565632,
						"acc_stderr,none": 0.03822561461565632,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.45012165450121655,
						"acc_norm,none": 0.45012165450121655,
						"acc_norm_stderr,none": 0.024570066591643822,
						"acc_stderr,none": 0.024570066591643822,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7429906542056075,
						"acc_norm,none": 0.7429906542056075,
						"acc_norm_stderr,none": 0.029941691533244635,
						"acc_stderr,none": 0.029941691533244635,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4715447154471545,
						"acc_norm,none": 0.4715447154471545,
						"acc_norm_stderr,none": 0.04519450648295478,
						"acc_stderr,none": 0.04519450648295478,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.45081967213114754,
						"acc_norm,none": 0.45081967213114754,
						"acc_norm_stderr,none": 0.04523412879516006,
						"acc_stderr,none": 0.04523412879516006,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5285714285714286,
						"acc_norm,none": 0.5285714285714286,
						"acc_norm_stderr,none": 0.03452921053595503,
						"acc_stderr,none": 0.03452921053595503,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5555555555555556,
						"acc_norm,none": 0.5555555555555556,
						"acc_norm_stderr,none": 0.03714034835915976,
						"acc_stderr,none": 0.03714034835915976,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.47619047619047616,
						"acc_norm,none": 0.47619047619047616,
						"acc_norm_stderr,none": 0.03642487945744188,
						"acc_stderr,none": 0.03642487945744188,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.04376552980994349,
						"acc_stderr,none": 0.04376552980994349,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3724137931034483,
						"acc_norm,none": 0.3724137931034483,
						"acc_norm_stderr,none": 0.04028731532947558,
						"acc_stderr,none": 0.04028731532947558,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.41904761904761906,
						"acc_norm,none": 0.41904761904761906,
						"acc_norm_stderr,none": 0.0483821637528253,
						"acc_stderr,none": 0.0483821637528253,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.46285714285714286,
						"acc_norm,none": 0.46285714285714286,
						"acc_norm_stderr,none": 0.03780017090541436,
						"acc_stderr,none": 0.03780017090541436,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3175355450236967,
						"acc_norm,none": 0.3175355450236967,
						"acc_norm_stderr,none": 0.032123772733289015,
						"acc_stderr,none": 0.032123772733289015,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.02309923743072034,
						"acc_stderr,none": 0.02309923743072034,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.4267241379310345,
						"acc_norm,none": 0.4267241379310345,
						"acc_norm_stderr,none": 0.032542389807201505,
						"acc_stderr,none": 0.032542389807201505,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.45977011494252873,
						"acc_norm,none": 0.45977011494252873,
						"acc_norm_stderr,none": 0.03789104827773084,
						"acc_stderr,none": 0.03789104827773084,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.45185185185185184,
						"acc_norm,none": 0.45185185185185184,
						"acc_norm_stderr,none": 0.04299268905480864,
						"acc_stderr,none": 0.04299268905480864,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.03333333333333334,
						"acc_stderr,none": 0.03333333333333334,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.03888176921674101,
						"acc_stderr,none": 0.03888176921674101,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3675675675675676,
						"acc_norm,none": 0.3675675675675676,
						"acc_norm_stderr,none": 0.03554403659088362,
						"acc_stderr,none": 0.03554403659088362,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.48520710059171596,
						"acc_norm,none": 0.48520710059171596,
						"acc_norm_stderr,none": 0.038558950703150026,
						"acc_stderr,none": 0.038558950703150026,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.4968944099378882,
						"acc_norm,none": 0.4968944099378882,
						"acc_norm_stderr,none": 0.039527708265086496,
						"acc_stderr,none": 0.039527708265086496,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.44375,
						"acc_norm,none": 0.44375,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.039400853796259426,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.13447152115893618,
						"mcc_stderr,none": 0.03255872877924799
					},
					"copa": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.0272659924344291,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.701954366766988,
						"likelihood_diff_stderr,none": 0.4909984754945655,
						"pct_stereotype,none": 0.5852713178294573,
						"pct_stereotype_stderr,none": 0.08237726662659446
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.728318410610684,
						"likelihood_diff_stderr,none": 0.11364638787463273,
						"pct_stereotype,none": 0.652355396541443,
						"pct_stereotype_stderr,none": 0.011632494841772138
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.63750977568574,
						"likelihood_diff_stderr,none": 0.43934175196229514,
						"pct_stereotype,none": 0.6483516483516484,
						"pct_stereotype_stderr,none": 0.05033132318627889
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.396305604414507,
						"likelihood_diff_stderr,none": 3.1171967439830928,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726127
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 7.456242370605469,
						"likelihood_diff_stderr,none": 0.871818709843644,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 4.154832792282105,
						"likelihood_diff_stderr,none": 0.21703693448635447,
						"pct_stereotype,none": 0.55625,
						"pct_stereotype_stderr,none": 0.027816907957904924
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.153910866490117,
						"likelihood_diff_stderr,none": 0.301365620900703,
						"pct_stereotype,none": 0.6342592592592593,
						"pct_stereotype_stderr,none": 0.03284738857647207
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.940925333234999,
						"likelihood_diff_stderr,none": 0.5247775857393626,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.05315633121839993
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.790061259832908,
						"likelihood_diff_stderr,none": 0.2177945155847267,
						"pct_stereotype,none": 0.6240157480314961,
						"pct_stereotype_stderr,none": 0.021511892689881264
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.35251844251478,
						"likelihood_diff_stderr,none": 0.4237083726333467,
						"pct_stereotype,none": 0.6936936936936937,
						"pct_stereotype_stderr,none": 0.04395066997351522
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.2641012540427585,
						"likelihood_diff_stderr,none": 0.4690143572180774,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.03495073154102979
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 5.014639583386873,
						"likelihood_diff_stderr,none": 0.2871902327259298,
						"pct_stereotype,none": 0.7157894736842105,
						"pct_stereotype_stderr,none": 0.032808156735746566
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.6755903229232905,
						"likelihood_diff_stderr,none": 0.11937914584178587,
						"pct_stereotype,none": 0.5181872391174717,
						"pct_stereotype_stderr,none": 0.012205216819921415
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.157307900322808,
						"likelihood_diff_stderr,none": 0.5613919054431789,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.714832012469952,
						"likelihood_diff_stderr,none": 0.9756168356853743,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.44508847323331,
						"likelihood_diff_stderr,none": 0.5723245759948886,
						"pct_stereotype,none": 0.696969696969697,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 4.274234236957871,
						"likelihood_diff_stderr,none": 0.23854640441412978,
						"pct_stereotype,none": 0.514018691588785,
						"pct_stereotype_stderr,none": 0.02793986154930237
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 5.167164783703951,
						"likelihood_diff_stderr,none": 0.31564110808595924,
						"pct_stereotype,none": 0.38735177865612647,
						"pct_stereotype_stderr,none": 0.03068725875850367
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.908005131615533,
						"likelihood_diff_stderr,none": 0.4630662364337442,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05897165471491952
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.7319683406663975,
						"likelihood_diff_stderr,none": 0.24122375329444992,
						"pct_stereotype,none": 0.46956521739130436,
						"pct_stereotype_stderr,none": 0.02329472641787361
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.6097795569378395,
						"likelihood_diff_stderr,none": 0.3328089300932936,
						"pct_stereotype,none": 0.5217391304347826,
						"pct_stereotype_stderr,none": 0.046785007552084375
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 5.085197993687221,
						"likelihood_diff_stderr,none": 0.39786751659300973,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.325802306739652,
						"likelihood_diff_stderr,none": 0.4336956957977887,
						"pct_stereotype,none": 0.6173469387755102,
						"pct_stereotype_stderr,none": 0.03480566531840031
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.060039370078740155,
						"exact_match_stderr,none": 0.005271302429704627
					},
					"glue": {
						"acc,none": 0.5821075512148642,
						"acc_stderr,none": 0.00042180222474276545,
						"alias": "glue",
						"f1,none": 0.4411312721184814,
						"f1_stderr,none": 0.0013090581534606635,
						"mcc,none": 0.13447152115893618,
						"mcc_stderr,none": 0.03255872877924799
					},
					"hellaswag": {
						"acc,none": 0.6594303923521211,
						"acc_norm,none": 0.8361880103565027,
						"acc_norm_stderr,none": 0.0036934848941792987,
						"acc_stderr,none": 0.004729322613301575,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.34805082298585044,
						"acc_norm,none": 0.34805082298585044,
						"acc_norm_stderr,none": 0.0720026679382373,
						"acc_stderr,none": 0.0720026679382373,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134715,
						"acc_stderr,none": 0.014470846741134715,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664375,
						"acc_stderr,none": 0.014566646394664375,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543152,
						"acc_stderr,none": 0.014512395033543152,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.323,
						"acc_norm,none": 0.323,
						"acc_norm_stderr,none": 0.014794927843348632,
						"acc_stderr,none": 0.014794927843348632,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.2966666666666667,
						"acc_norm,none": 0.2966666666666667,
						"acc_norm_stderr,none": 0.01866386416676141,
						"acc_stderr,none": 0.01866386416676141,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.349,
						"acc_norm,none": 0.349,
						"acc_norm_stderr,none": 0.015080663991563102,
						"acc_stderr,none": 0.015080663991563102,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.593,
						"acc_norm,none": 0.593,
						"acc_norm_stderr,none": 0.015543249100255542,
						"acc_stderr,none": 0.015543249100255542,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134706,
						"acc_stderr,none": 0.014470846741134706,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03165255790786192,
						"acc_stderr,none": 0.03165255790786192,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.376,
						"acc_norm,none": 0.376,
						"acc_norm_stderr,none": 0.01532510550889813,
						"acc_stderr,none": 0.01532510550889813,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3384615384615385,
						"acc_norm,none": 0.3384615384615385,
						"acc_norm_stderr,none": 0.0416617354083896,
						"acc_stderr,none": 0.0416617354083896,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.44,
						"acc_norm,none": 0.44,
						"acc_norm_stderr,none": 0.04988876515698589,
						"acc_stderr,none": 0.04988876515698589,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.327,
						"acc_norm,none": 0.327,
						"acc_norm_stderr,none": 0.014842213153411239,
						"acc_stderr,none": 0.014842213153411239,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.438,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.015697210019694686,
						"acc_stderr,none": 0.015697210019694686,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485258,
						"acc_stderr,none": 0.014174516461485258,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477348,
						"acc_stderr,none": 0.014385511563477348,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.01496596071022448,
						"acc_stderr,none": 0.01496596071022448,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738863,
						"acc_stderr,none": 0.014806864733738863,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.326,
						"acc_norm,none": 0.326,
						"acc_norm_stderr,none": 0.014830507204541037,
						"acc_stderr,none": 0.014830507204541037,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.37,
						"acc_norm,none": 0.37,
						"acc_norm_stderr,none": 0.04852365870939099,
						"acc_stderr,none": 0.04852365870939099,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.357,
						"acc_norm,none": 0.357,
						"acc_norm_stderr,none": 0.015158521721486769,
						"acc_stderr,none": 0.015158521721486769,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.531,
						"acc_norm,none": 0.531,
						"acc_norm_stderr,none": 0.015788865959539006,
						"acc_stderr,none": 0.015788865959539006,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.382,
						"acc_norm,none": 0.382,
						"acc_norm_stderr,none": 0.015372453034968531,
						"acc_stderr,none": 0.015372453034968531,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795018,
						"acc_stderr,none": 0.014944140233795018,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.318,
						"acc_norm,none": 0.318,
						"acc_norm_stderr,none": 0.014734079309311901,
						"acc_stderr,none": 0.014734079309311901,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.394,
						"acc_norm,none": 0.394,
						"acc_norm_stderr,none": 0.015459721957493375,
						"acc_stderr,none": 0.015459721957493375,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.35333333333333333,
						"acc_norm,none": 0.35333333333333333,
						"acc_norm_stderr,none": 0.019530759477238566,
						"acc_stderr,none": 0.019530759477238566,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.511,
						"acc_norm,none": 0.511,
						"acc_norm_stderr,none": 0.015815471195292682,
						"acc_stderr,none": 0.015815471195292682,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361428,
						"acc_stderr,none": 0.014498627873361428,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.287,
						"acc_norm,none": 0.287,
						"acc_norm_stderr,none": 0.014312087053809961,
						"acc_stderr,none": 0.014312087053809961,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.014456832294801094,
						"acc_stderr,none": 0.014456832294801094,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.04648231987117316,
						"acc_stderr,none": 0.04648231987117316,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.39,
						"acc_norm,none": 0.39,
						"acc_norm_stderr,none": 0.028207307101406273,
						"acc_stderr,none": 0.028207307101406273,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.014326941797231563,
						"acc_stderr,none": 0.014326941797231563,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.337,
						"acc_norm,none": 0.337,
						"acc_norm_stderr,none": 0.014955087918653605,
						"acc_stderr,none": 0.014955087918653605,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.01417451646148526,
						"acc_stderr,none": 0.01417451646148526,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.0338114785302567,
						"acc_stderr,none": 0.0338114785302567,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.271,
						"acc_norm,none": 0.271,
						"acc_norm_stderr,none": 0.014062601350986186,
						"acc_stderr,none": 0.014062601350986186,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.365,
						"acc_norm,none": 0.365,
						"acc_norm_stderr,none": 0.015231776226264876,
						"acc_stderr,none": 0.015231776226264876,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.36,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.034026297840400156,
						"acc_stderr,none": 0.034026297840400156,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.474,
						"acc_norm,none": 0.474,
						"acc_norm_stderr,none": 0.01579789775804277,
						"acc_stderr,none": 0.01579789775804277,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.6044727033545275,
						"acc_norm,none": 0.49,
						"acc_norm_stderr,none": 0.0005008016032064087,
						"acc_stderr,none": 0.10425554434639311,
						"alias": "kobest",
						"f1,none": 0.5572606858700483,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.811965811965812,
						"acc_stderr,none": 0.010431780632246384,
						"alias": " - kobest_boolq",
						"f1,none": 0.8116217798594848,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.589,
						"acc_stderr,none": 0.015566673418599278,
						"alias": " - kobest_copa",
						"f1,none": 0.5873622407334711,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.402,
						"acc_norm,none": 0.49,
						"acc_norm_stderr,none": 0.022378596989230774,
						"acc_stderr,none": 0.02194892960993861,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3993720204508453,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5365239294710328,
						"acc_stderr,none": 0.02505881982355679,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4479777522519799,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4873015873015873,
						"acc_stderr,none": 0.014086951987375836,
						"alias": " - kobest_wic",
						"f1,none": 0.3470266593076538,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6836794100523966,
						"acc_stderr,none": 0.01672814469839814,
						"alias": "lambada",
						"perplexity,none": 3.784860922534949,
						"perplexity_stderr,none": 0.21243710178079137
					},
					"lambada_cloze": {
						"acc,none": 0.2098777411216767,
						"acc_stderr,none": 0.04781818869704983,
						"alias": "lambada_cloze",
						"perplexity,none": 79.66147457124333,
						"perplexity_stderr,none": 21.394086996235064
					},
					"lambada_openai": {
						"acc,none": 0.7141470987774112,
						"acc_stderr,none": 0.006294731543352489,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3987038759067074,
						"perplexity_stderr,none": 0.07238587778792913
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.30487094896176986,
						"acc_stderr,none": 0.006413613926848425,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 37.336051664991544,
						"perplexity_stderr,none": 1.1681281576855278
					},
					"lambada_standard": {
						"acc,none": 0.6528235979041335,
						"acc_stderr,none": 0.006632619664862152,
						"alias": " - lambada_standard",
						"perplexity,none": 4.172849343121048,
						"perplexity_stderr,none": 0.0987968797145809
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.11488453328158355,
						"acc_stderr,none": 0.004442657362858413,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 121.9868974774951,
						"perplexity_stderr,none": 4.271260132338575
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.48982188295165396,
						"exact_match_stderr,get-answer": 0.012612230884189321
					},
					"logiqa": {
						"acc,none": 0.29339477726574503,
						"acc_norm,none": 0.30721966205837176,
						"acc_norm_stderr,none": 0.018095292260828216,
						"acc_stderr,none": 0.017859032704399504,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.3524173027989822,
						"acc_norm,none": 0.3428753180661578,
						"acc_norm_stderr,none": 0.011975782754482163,
						"acc_stderr,none": 0.012052805191199441,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.37051926298157456,
						"acc_norm,none": 0.369179229480737,
						"acc_norm_stderr,none": 0.008834301408008675,
						"acc_stderr,none": 0.008840914868809936,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.7989832662571489,
						"acc_stderr,none": 0.004124544076426738,
						"alias": "mc_taco",
						"f1,none": 0.73402466367713,
						"f1_stderr,none": 0.005870749623260156
					},
					"medmcqa": {
						"acc,none": 0.4644991632799426,
						"acc_norm,none": 0.4644991632799426,
						"acc_norm_stderr,none": 0.007712239903619723,
						"acc_stderr,none": 0.007712239903619723,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.5019638648860958,
						"acc_norm,none": 0.5019638648860958,
						"acc_norm_stderr,none": 0.014019195713555015,
						"acc_stderr,none": 0.014019195713555015,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.5893747329440251,
						"acc_stderr,none": 0.12615955327862066,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5851851851851851,
						"acc_stderr,none": 0.042561937679014075,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.039397364351956274,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.660377358490566,
						"acc_stderr,none": 0.029146904747798325,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6458333333333334,
						"acc_stderr,none": 0.039994111357535424,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620332,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5664739884393064,
						"acc_stderr,none": 0.03778621079092055,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.38235294117647056,
						"acc_stderr,none": 0.04835503696107224,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695238,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.5106382978723404,
						"acc_stderr,none": 0.03267862331014063,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.4824561403508772,
						"acc_stderr,none": 0.04700708033551038,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5517241379310345,
						"acc_stderr,none": 0.04144311810878152,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.41798941798941797,
						"acc_stderr,none": 0.02540255550326091,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.36507936507936506,
						"acc_stderr,none": 0.043062412591271526,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.6935483870967742,
						"acc_stderr,none": 0.026226485652553883,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.5073891625615764,
						"acc_stderr,none": 0.0351760354036101,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7272727272727273,
						"acc_stderr,none": 0.03477691162163659,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7525252525252525,
						"acc_stderr,none": 0.03074630074212451,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.8186528497409327,
						"acc_stderr,none": 0.027807032360686088,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5692307692307692,
						"acc_stderr,none": 0.025106820660539753,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.35185185185185186,
						"acc_stderr,none": 0.02911661760608301,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.6554621848739496,
						"acc_stderr,none": 0.03086868260412163,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.36423841059602646,
						"acc_stderr,none": 0.03929111781242741,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7889908256880734,
						"acc_stderr,none": 0.01749392240411265,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.46296296296296297,
						"acc_stderr,none": 0.03400603625538272,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7794117647058824,
						"acc_stderr,none": 0.0291022543896741,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7805907172995781,
						"acc_stderr,none": 0.026939106581553945,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6278026905829597,
						"acc_stderr,none": 0.032443052830087304,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6870229007633588,
						"acc_stderr,none": 0.04066962905677697,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5375132837407015,
						"acc_stderr,none": 0.14096864671190043,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.7520661157024794,
						"acc_stderr,none": 0.03941897526516302,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7037037037037037,
						"acc_stderr,none": 0.04414343666854932,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.7607361963190185,
						"acc_stderr,none": 0.033519538795212696,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.4642857142857143,
						"acc_stderr,none": 0.04733667890053756,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.7378640776699029,
						"acc_stderr,none": 0.04354631077260595,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8675213675213675,
						"acc_stderr,none": 0.02220930907316562,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7905491698595147,
						"acc_stderr,none": 0.014551310568143704,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6589595375722543,
						"acc_stderr,none": 0.025522474632121615,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.3396648044692737,
						"acc_stderr,none": 0.015839400406212494,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6699346405228758,
						"acc_stderr,none": 0.02692565465361569,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.664628258770518,
						"acc_stderr,none": 0.09811125295244115,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.6463022508038585,
						"acc_stderr,none": 0.02715520810320088,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.6635802469135802,
						"acc_stderr,none": 0.026289734945952922,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.46099290780141844,
						"acc_stderr,none": 0.02973659252642444,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.4178617992177314,
						"acc_stderr,none": 0.012596744108998564,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.6617647058823529,
						"acc_stderr,none": 0.028739328513983576,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5866013071895425,
						"acc_stderr,none": 0.019922115682786685,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6636363636363637,
						"acc_stderr,none": 0.04525393596302505,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.7061224489795919,
						"acc_stderr,none": 0.029162738410249772,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6873578160545987,
						"acc_stderr,none": 0.08390077225869963,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.8407960199004975,
						"acc_stderr,none": 0.02587064676616913,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.4969869965112591,
						"acc_stderr,none": 0.1084738786137943,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.03882310850890594,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.8128654970760234,
						"acc_stderr,none": 0.029913127232368036,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.5536423841059602,
						"acc_stderr,none": 0.005018028376381182,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5539056143205858,
						"acc_stderr,none": 0.0050134006643811875,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7352941176470589,
						"acc_stderr,none": 0.021868305754262178,
						"alias": "mrpc",
						"f1,none": 0.8322981366459627,
						"f1_stderr,none": 0.015939263652898012
					},
					"multimedqa": {
						"acc,none": 0.5178140525195174,
						"acc_norm,none": 0.4779975719488681,
						"acc_norm_stderr,none": 0.00015397501597299025,
						"acc_stderr,none": 0.06923035174785634,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.33683993399339934,
						"acc_stderr,none": 0.006788666280052222,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7554552294958619,
						"mrr_stderr,none": 0.010019031778523878,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.39954853273137697,
						"r@2_stderr,none": 0.016464634337526422
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6925319789315275,
						"mrr_stderr,none": 0.01050046985017331,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.43340857787810383,
						"r@2_stderr,none": 0.016657587894501218
					},
					"openbookqa": {
						"acc,none": 0.354,
						"acc_norm,none": 0.454,
						"acc_norm_stderr,none": 0.022288147591176945,
						"acc_stderr,none": 0.021407582047916447,
						"alias": "openbookqa"
					},
					"piqa": {
						"acc,none": 0.8019586507072906,
						"acc_norm,none": 0.8063112078346029,
						"acc_norm_stderr,none": 0.009220384152336641,
						"acc_stderr,none": 0.009298209954776725,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.4658411614005124,
						"acc_norm,none": 0.4383539709649872,
						"acc_norm_stderr,none": 0.0036250747825807726,
						"acc_stderr,none": 0.0036444105122983633,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.01917308567833712,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7283806813415201,
						"acc_norm,none": 0.7003817448395876,
						"acc_norm_stderr,none": 0.005970464335515298,
						"acc_stderr,none": 0.16111508221444962,
						"alias": "pythia",
						"bits_per_byte,none": 0.615588939628387,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5321833401531846,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3987038759067074,
						"perplexity_stderr,none": 0.07238587778792913,
						"word_perplexity,none": 9.79354701944479,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.5478723404255319,
						"acc_norm,none": 0.5797872340425532,
						"acc_norm_stderr,none": 0.0785497170880167,
						"acc_stderr,none": 0.05995870420406338,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.6583333333333333,
						"acc_norm,none": 0.7333333333333333,
						"acc_norm_stderr,none": 0.040537932807004046,
						"acc_stderr,none": 0.043476116843170064,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.55625,
						"acc_norm,none": 0.6125,
						"acc_norm_stderr,none": 0.03863583812241406,
						"acc_stderr,none": 0.039400853796259426,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4964788732394366,
						"acc_norm,none": 0.4964788732394366,
						"acc_norm_stderr,none": 0.029721177900313853,
						"acc_stderr,none": 0.029721177900313853,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5831960461285008,
						"acc_stderr,none": 0.006671098492098741,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5874103388572842,
						"acc_stderr,none": 0.002448406162571853,
						"alias": "qqp",
						"f1,none": 0.4376495971412197,
						"f1_stderr,none": 0.0036200081015969367
					},
					"race": {
						"acc,none": 0.46124401913875596,
						"acc_stderr,none": 0.015428054940919853,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.703971119133574,
						"acc_stderr,none": 0.02747830386297935,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.953,
						"acc_norm,none": 0.906,
						"acc_norm_stderr,none": 0.009233052000787714,
						"acc_stderr,none": 0.006695956678163039,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.703971119133574,
						"acc_stderr,none": 0.02747830386297935,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.856651376146789,
						"acc_stderr,none": 0.011873800423675024,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5982205338398481,
						"acc_norm,none": 0.7867639708087574,
						"acc_norm_stderr,none": 0.002895897908442977,
						"acc_stderr,none": 0.0034662130168877847,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.8909187714219161,
						"acc_stderr,none": 0.06492188924400273,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.9704527243589743,
						"acc_stderr,none": 0.0016947879911930024,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9809465896422418,
						"acc_stderr,none": 0.0013763804198534339,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.7259803921568627,
						"acc_stderr,none": 0.004416461713646375,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.5969340297618705,
						"acc_stderr,none": 0.0015597689440213628,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.5446756425948592,
						"bleu_acc_stderr,none": 0.017433490102538775,
						"bleu_diff,none": 4.366315081410718,
						"bleu_diff_stderr,none": 0.6138335139825885,
						"bleu_max,none": 21.27473498730541,
						"bleu_max_stderr,none": 0.7533871133042884,
						"rouge1_acc,none": 0.5716034271725826,
						"rouge1_acc_stderr,none": 0.01732308859731477,
						"rouge1_diff,none": 5.717141224803091,
						"rouge1_diff_stderr,none": 0.870296684878197,
						"rouge1_max,none": 46.258060058643956,
						"rouge1_max_stderr,none": 0.8552045039813724,
						"rouge2_acc,none": 0.44920440636474906,
						"rouge2_acc_stderr,none": 0.017412941986115277,
						"rouge2_diff,none": 5.424527805363653,
						"rouge2_diff_stderr,none": 0.9322851416532594,
						"rouge2_max,none": 31.09098851770095,
						"rouge2_max_stderr,none": 0.9802688849674306,
						"rougeL_acc,none": 0.5532435740514076,
						"rougeL_acc_stderr,none": 0.017403977522557144,
						"rougeL_diff,none": 5.486366464760439,
						"rougeL_diff_stderr,none": 0.8794580025029041,
						"rougeL_max,none": 43.2095248401885,
						"rougeL_max_stderr,none": 0.871503759712897
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.5446756425948592,
						"bleu_acc_stderr,none": 0.017433490102538775,
						"bleu_diff,none": 4.366315081410718,
						"bleu_diff_stderr,none": 0.6138335139825885,
						"bleu_max,none": 21.27473498730541,
						"bleu_max_stderr,none": 0.7533871133042884,
						"rouge1_acc,none": 0.5716034271725826,
						"rouge1_acc_stderr,none": 0.01732308859731477,
						"rouge1_diff,none": 5.717141224803091,
						"rouge1_diff_stderr,none": 0.870296684878197,
						"rouge1_max,none": 46.258060058643956,
						"rouge1_max_stderr,none": 0.8552045039813724,
						"rouge2_acc,none": 0.44920440636474906,
						"rouge2_acc_stderr,none": 0.017412941986115277,
						"rouge2_diff,none": 5.424527805363653,
						"rouge2_diff_stderr,none": 0.9322851416532594,
						"rouge2_max,none": 31.09098851770095,
						"rouge2_max_stderr,none": 0.9802688849674306,
						"rougeL_acc,none": 0.5532435740514076,
						"rougeL_acc_stderr,none": 0.017403977522557144,
						"rougeL_diff,none": 5.486366464760439,
						"rougeL_diff_stderr,none": 0.8794580025029041,
						"rougeL_max,none": 43.2095248401885,
						"rougeL_max_stderr,none": 0.871503759712897
					},
					"truthfulqa_mc1": {
						"acc,none": 0.5250917992656059,
						"acc_stderr,none": 0.017481446804104003,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.6687762602581351,
						"acc_stderr,none": 0.015232622259027086,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.060039370078740155,
						"exact_match_stderr,none": 0.005271302429704627
					},
					"wic": {
						"acc,none": 0.6018808777429467,
						"acc_stderr,none": 0.019395102343077997,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.615588939628387,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5321833401531846,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.79354701944479,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7371744277821626,
						"acc_stderr,none": 0.012370922527262008,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.6197183098591549,
						"acc_stderr,none": 0.05802308977399397,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8827838827838828,
						"acc_stderr,none": 0.01950457139863538,
						"alias": "wsc273"
					}
				}
			}
		},
		"name": "m8than/mistral-7b-instruct-0.2"
	},
	"meta-llama/Llama-2-7b-chat-hf": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6417700112739572,
						"acc_norm,none": 0.6138669673055243,
						"acc_norm_stderr,none": 0.0808545714371271,
						"acc_stderr,none": 0.09463686154250861,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.413125,
						"acc_stderr,none": 0.015247747522547985,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.30155,
						"acc_stderr,none": 0.12080124493659827,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8035820895522388,
						"acc_stderr,none": 0.14499572678683972,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.3276374442793461,
						"acc_norm,none": 0.3276374442793461,
						"acc_norm_stderr,none": 0.12581133649631557,
						"acc_stderr,none": 0.12581133649631557,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.33595233983767914,
						"acc_norm,none": 0.33595233983767914,
						"acc_norm_stderr,none": 0.07152832991959227,
						"acc_stderr,none": 0.07152832991959227,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.63006169439144,
						"likelihood_diff_stderr,none": 0.5744039717270023,
						"pct_stereotype,none": 0.5709600477042337,
						"pct_stereotype_stderr,none": 0.08607765193290201
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03543307086614173,
						"exact_match_stderr,none": 0.0041021885546655475
					},
					"glue": {
						"acc,none": 0.4895337065269176,
						"acc_stderr,none": 0.00096090966575593,
						"alias": "glue",
						"f1,none": 0.4838809799217942,
						"f1_stderr,none": 9.500991312938969e-05,
						"mcc,none": 0.042422809545074935,
						"mcc_stderr,none": 0.030999041061600608
					},
					"kmmlu": {
						"acc,none": 0.2778226970834537,
						"acc_norm,none": 0.2778226970834537,
						"acc_norm_stderr,none": 0.02874125338165857,
						"acc_stderr,none": 0.02874125338165857,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5051523788642841,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.000499847695390778,
						"acc_stderr,none": 0.032855130670812395,
						"alias": "kobest",
						"f1,none": 0.4133038274651914,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.685231903745391,
						"acc_stderr,none": 0.012811523002124903,
						"alias": "lambada",
						"perplexity,none": 3.6181896161210485,
						"perplexity_stderr,none": 0.20070597826065645
					},
					"lambada_cloze": {
						"acc,none": 0.26964874830196,
						"acc_stderr,none": 0.023571225973558887,
						"alias": "lambada_cloze",
						"perplexity,none": 59.16488358266109,
						"perplexity_stderr,none": 17.205019334805293
					},
					"lambada_multilingual": {
						"acc,none": 0.4823986027556763,
						"acc_stderr,none": 0.09072647847465312,
						"alias": "lambada_multilingual",
						"perplexity,none": 57.042172854076604,
						"perplexity_stderr,none": 26.965652387645022
					},
					"mmlu": {
						"acc,none": 0.46246973365617433,
						"acc_stderr,none": 0.11718152391648695,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.4308182784272051,
						"acc_stderr,none": 0.11623107823695161,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.5490827164467331,
						"acc_stderr,none": 0.09996282778644679,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.5297367565810854,
						"acc_stderr,none": 0.09854693982666178,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3587059942911513,
						"acc_stderr,none": 0.09468851310930955,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.408374733853797,
						"acc_norm,none": 0.371433252298277,
						"acc_norm_stderr,none": 0.00011753217044080902,
						"acc_stderr,none": 0.08811855406751401,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.44192857142857145,
						"acc_stderr,none": 0.04262578926290293,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7365426878291983,
						"acc_norm,none": 0.6178353531205103,
						"acc_norm_stderr,none": 0.008181587220774588,
						"acc_stderr,none": 0.14193765970017502,
						"alias": "pythia",
						"bits_per_byte,none": 0.6606345727221108,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5807777795639693,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2669694074727325,
						"perplexity_stderr,none": 0.08670981010512012,
						"word_perplexity,none": 11.573158664725804,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.50177304964539,
						"acc_norm,none": 0.5283687943262412,
						"acc_norm_stderr,none": 0.06916068366031697,
						"acc_stderr,none": 0.05671904085534381,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7185118631659512,
						"acc_stderr,none": 0.07014856545878871,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.37722048975258216,
						"acc_stderr,none": 0.0017006425139196304,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.4467564259485924,
						"bleu_acc_stderr,none": 0.017403977522557144,
						"bleu_diff,none": -1.7707450061348697,
						"bleu_diff_stderr,none": 0.6074797313293264,
						"bleu_max,none": 20.270731760309094,
						"bleu_max_stderr,none": 0.696522073489329,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.01736923616440443,
						"rouge1_diff,none": -1.9634603403693753,
						"rouge1_diff_stderr,none": 0.7429761242693544,
						"rouge1_max,none": 44.88360003234336,
						"rouge1_max_stderr,none": 0.804401768251018,
						"rouge2_acc,none": 0.37821297429620565,
						"rouge2_acc_stderr,none": 0.01697633590754687,
						"rouge2_diff,none": -3.3910272731866984,
						"rouge2_diff_stderr,none": 0.8676854434022686,
						"rouge2_max,none": 29.83853821263299,
						"rouge2_max_stderr,none": 0.8856794698145992,
						"rougeL_acc,none": 0.4357405140758874,
						"rougeL_acc_stderr,none": 0.01735834539886313,
						"rougeL_diff,none": -2.3291268007159838,
						"rougeL_diff_stderr,none": 0.7395397657573707,
						"rougeL_max,none": 41.62103860651703,
						"rougeL_max_stderr,none": 0.8052149731246078
					},
					"xcopa": {
						"acc,none": 0.5598181818181818,
						"acc_stderr,none": 0.054954054116450136,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.389718875502008,
						"acc_stderr,none": 0.04489098206900832,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.58552433668251,
						"acc_stderr,none": 0.07646648104203008,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.76219375140481,
						"acc_stderr,none": 0.051925008888302905,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6417700112739572,
						"acc_norm,none": 0.6138669673055243,
						"acc_norm_stderr,none": 0.0808545714371271,
						"acc_stderr,none": 0.09463686154250861,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.413125,
						"acc_stderr,none": 0.015247747522547985,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.015605111967541944,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.411,
						"acc_stderr,none": 0.015566673418599273,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.41083333333333333,
						"acc_stderr,none": 0.01420830688776151,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.44197952218430037,
						"acc_norm,none": 0.44368600682593856,
						"acc_norm_stderr,none": 0.014518421825670444,
						"acc_stderr,none": 0.014512682523128345,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7403198653198653,
						"acc_norm,none": 0.6978114478114478,
						"acc_norm_stderr,none": 0.009422719042483183,
						"acc_stderr,none": 0.00899699042856222,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.30155,
						"acc_stderr,none": 0.12080124493659827,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.3715,
						"acc_stderr,none": 0.010807510172933646,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1005,
						"acc_stderr,none": 0.006724766631127047,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.3795,
						"acc_stderr,none": 0.010853514379554386,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.3925,
						"acc_stderr,none": 0.010921607746018006,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.352,
						"acc_stderr,none": 0.010681996654477078,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.4865,
						"acc_stderr,none": 0.01117905902481682,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.3495,
						"acc_stderr,none": 0.010664508468299673,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.3265,
						"acc_stderr,none": 0.010488273305862498,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.257,
						"acc_stderr,none": 0.009773600238950754,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.008676789587852495,
						"acc_stderr,none": 0.001932172661463566,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8035820895522388,
						"acc_stderr,none": 0.14499572678683972,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345717,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243751,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437642,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662737,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.699,
						"acc_stderr,none": 0.014512395033543164,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.01574000469338385,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598121,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.833,
						"acc_stderr,none": 0.011800434324644608,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727073,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099237,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787723,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177549,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397238,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103301,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557419,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968777,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.015204840912919496,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.726,
						"acc_stderr,none": 0.014111099288259588,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425647,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333377,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938572,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.015778243024904586,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783196,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515445,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858924,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462618,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230184,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.00973955126578513,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504406,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409305,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397233,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.01574000469338384,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.623,
						"acc_stderr,none": 0.015333170125779862,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087973,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397236,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973437,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890124,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.01016928780271333,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895042,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122348,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243788,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264352,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.015186527932040122,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787724,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336662,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676777,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.651,
						"acc_stderr,none": 0.015080663991563098,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724451,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.01126614068463216,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.537,
						"acc_stderr,none": 0.01577592722726242,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386688,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454259,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541667,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942314,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380709,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706852,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.249,
						"acc_stderr,none": 0.013681600278702308,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.257,
						"acc_stderr,none": 0.01382541652689503,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7990825688073394,
						"acc_stderr,none": 0.007008049757657985,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.5714285714285714,
						"acc_stderr,none": 0.06672848092813059,
						"alias": "cb",
						"f1,none": 0.368075117370892,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.3276374442793461,
						"acc_norm,none": 0.3276374442793461,
						"acc_norm_stderr,none": 0.12581133649631557,
						"acc_stderr,none": 0.12581133649631557,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828227,
						"acc_stderr,none": 0.05589005688828227,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.06242676343682884,
						"acc_stderr,none": 0.06242676343682884,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.41818181818181815,
						"acc_norm,none": 0.41818181818181815,
						"acc_norm_stderr,none": 0.0671242332357016,
						"acc_stderr,none": 0.0671242332357016,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.43243243243243246,
						"acc_norm,none": 0.43243243243243246,
						"acc_norm_stderr,none": 0.08256893144064577,
						"acc_stderr,none": 0.08256893144064577,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506854,
						"acc_stderr,none": 0.09398415777506854,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.55,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.4318181818181818,
						"acc_norm,none": 0.4318181818181818,
						"acc_norm_stderr,none": 0.07553702921752882,
						"acc_stderr,none": 0.07553702921752882,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.41304347826086957,
						"acc_norm,none": 0.41304347826086957,
						"acc_norm_stderr,none": 0.07339975224406144,
						"acc_stderr,none": 0.07339975224406144,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.33595233983767914,
						"acc_norm,none": 0.33595233983767914,
						"acc_norm_stderr,none": 0.07152832991959227,
						"acc_stderr,none": 0.07152832991959227,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.33136094674556216,
						"acc_norm,none": 0.33136094674556216,
						"acc_norm_stderr,none": 0.03631548844087169,
						"acc_stderr,none": 0.03631548844087169,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.033953425890020345,
						"acc_stderr,none": 0.033953425890020345,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.29878048780487804,
						"acc_norm,none": 0.29878048780487804,
						"acc_norm_stderr,none": 0.035851663369096606,
						"acc_stderr,none": 0.035851663369096606,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.31875,
						"acc_norm,none": 0.31875,
						"acc_norm_stderr,none": 0.036955560385363254,
						"acc_stderr,none": 0.036955560385363254,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3212121212121212,
						"acc_norm,none": 0.3212121212121212,
						"acc_norm_stderr,none": 0.036462049632538115,
						"acc_stderr,none": 0.036462049632538115,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3875598086124402,
						"acc_norm,none": 0.3875598086124402,
						"acc_norm_stderr,none": 0.033780769688873835,
						"acc_stderr,none": 0.033780769688873835,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3816793893129771,
						"acc_norm,none": 0.3816793893129771,
						"acc_norm_stderr,none": 0.04260735157644561,
						"acc_stderr,none": 0.04260735157644561,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3602941176470588,
						"acc_norm,none": 0.3602941176470588,
						"acc_norm_stderr,none": 0.04131919708409121,
						"acc_stderr,none": 0.04131919708409121,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.04674660221110773,
						"acc_stderr,none": 0.04674660221110773,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.37770897832817335,
						"acc_norm,none": 0.37770897832817335,
						"acc_norm_stderr,none": 0.027017644684186253,
						"acc_stderr,none": 0.027017644684186253,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3431372549019608,
						"acc_norm,none": 0.3431372549019608,
						"acc_norm_stderr,none": 0.033321399446680854,
						"acc_stderr,none": 0.033321399446680854,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.33519553072625696,
						"acc_norm,none": 0.33519553072625696,
						"acc_norm_stderr,none": 0.035382301081428424,
						"acc_stderr,none": 0.035382301081428424,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2869198312236287,
						"acc_norm,none": 0.2869198312236287,
						"acc_norm_stderr,none": 0.02944377302259469,
						"acc_stderr,none": 0.02944377302259469,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3925233644859813,
						"acc_norm,none": 0.3925233644859813,
						"acc_norm_stderr,none": 0.04742907046004223,
						"acc_stderr,none": 0.04742907046004223,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439377,
						"acc_stderr,none": 0.04396093377439377,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.042365112580946315,
						"acc_stderr,none": 0.042365112580946315,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604901,
						"acc_stderr,none": 0.04176466758604901,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.3490566037735849,
						"acc_norm,none": 0.3490566037735849,
						"acc_norm_stderr,none": 0.04651841326529026,
						"acc_stderr,none": 0.04651841326529026,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.30036630036630035,
						"acc_norm,none": 0.30036630036630035,
						"acc_norm_stderr,none": 0.027795629283121376,
						"acc_stderr,none": 0.027795629283121376,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3284313725490196,
						"acc_norm,none": 0.3284313725490196,
						"acc_norm_stderr,none": 0.032962451101722294,
						"acc_stderr,none": 0.032962451101722294,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.38011695906432746,
						"acc_norm,none": 0.38011695906432746,
						"acc_norm_stderr,none": 0.037229657413855394,
						"acc_stderr,none": 0.037229657413855394,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3877551020408163,
						"acc_norm,none": 0.3877551020408163,
						"acc_norm_stderr,none": 0.040324121989960035,
						"acc_stderr,none": 0.040324121989960035,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.30935251798561153,
						"acc_norm,none": 0.30935251798561153,
						"acc_norm_stderr,none": 0.03934735112547113,
						"acc_stderr,none": 0.03934735112547113,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3710691823899371,
						"acc_norm,none": 0.3710691823899371,
						"acc_norm_stderr,none": 0.03843265063227864,
						"acc_stderr,none": 0.03843265063227864,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.4171779141104294,
						"acc_norm,none": 0.4171779141104294,
						"acc_norm_stderr,none": 0.0387410285981808,
						"acc_stderr,none": 0.0387410285981808,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871637,
						"acc_stderr,none": 0.03615263198871637,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.024980933164903754,
						"acc_stderr,none": 0.024980933164903754,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.3686868686868687,
						"acc_norm,none": 0.3686868686868687,
						"acc_norm_stderr,none": 0.034373055019806184,
						"acc_stderr,none": 0.034373055019806184,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.4327731092436975,
						"acc_norm,none": 0.4327731092436975,
						"acc_norm_stderr,none": 0.032183581077426124,
						"acc_stderr,none": 0.032183581077426124,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633743,
						"acc_stderr,none": 0.029614094221633743,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.34814814814814815,
						"acc_norm,none": 0.34814814814814815,
						"acc_norm_stderr,none": 0.041153246103369526,
						"acc_stderr,none": 0.041153246103369526,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.3706293706293706,
						"acc_norm,none": 0.3706293706293706,
						"acc_norm_stderr,none": 0.04053022174925761,
						"acc_stderr,none": 0.04053022174925761,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079126,
						"acc_stderr,none": 0.03344352850079126,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.35570469798657717,
						"acc_norm,none": 0.35570469798657717,
						"acc_norm_stderr,none": 0.03935105907232846,
						"acc_stderr,none": 0.03935105907232846,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.3106060606060606,
						"acc_norm,none": 0.3106060606060606,
						"acc_norm_stderr,none": 0.04042993522120926,
						"acc_stderr,none": 0.04042993522120926,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3474576271186441,
						"acc_norm,none": 0.3474576271186441,
						"acc_norm_stderr,none": 0.04402124821792678,
						"acc_stderr,none": 0.04402124821792678,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.04461272175910508,
						"acc_stderr,none": 0.04461272175910508,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3776223776223776,
						"acc_norm,none": 0.3776223776223776,
						"acc_norm_stderr,none": 0.04068287849209808,
						"acc_stderr,none": 0.04068287849209808,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3492063492063492,
						"acc_norm,none": 0.3492063492063492,
						"acc_norm_stderr,none": 0.04263906892795132,
						"acc_stderr,none": 0.04263906892795132,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.372972972972973,
						"acc_norm,none": 0.372972972972973,
						"acc_norm_stderr,none": 0.03565109718452138,
						"acc_stderr,none": 0.03565109718452138,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.036152631988716356,
						"acc_stderr,none": 0.036152631988716356,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.34549878345498786,
						"acc_norm,none": 0.34549878345498786,
						"acc_norm_stderr,none": 0.02348478355758493,
						"acc_stderr,none": 0.02348478355758493,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.5981308411214953,
						"acc_norm,none": 0.5981308411214953,
						"acc_norm_stderr,none": 0.033593142745718396,
						"acc_stderr,none": 0.033593142745718396,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3089430894308943,
						"acc_norm,none": 0.3089430894308943,
						"acc_norm_stderr,none": 0.041832732587876245,
						"acc_stderr,none": 0.041832732587876245,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.21311475409836064,
						"acc_norm,none": 0.21311475409836064,
						"acc_norm_stderr,none": 0.03722800595170433,
						"acc_stderr,none": 0.03722800595170433,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3523809523809524,
						"acc_norm,none": 0.3523809523809524,
						"acc_norm_stderr,none": 0.03304401999334816,
						"acc_stderr,none": 0.03304401999334816,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.37222222222222223,
						"acc_norm,none": 0.37222222222222223,
						"acc_norm_stderr,none": 0.03613080206107231,
						"acc_stderr,none": 0.03613080206107231,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.42328042328042326,
						"acc_norm,none": 0.42328042328042326,
						"acc_norm_stderr,none": 0.036034418132512874,
						"acc_stderr,none": 0.036034418132512874,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.22413793103448276,
						"acc_norm,none": 0.22413793103448276,
						"acc_norm_stderr,none": 0.03888669370117824,
						"acc_stderr,none": 0.03888669370117824,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3310344827586207,
						"acc_norm,none": 0.3310344827586207,
						"acc_norm_stderr,none": 0.03921545312467122,
						"acc_stderr,none": 0.03921545312467122,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.4857142857142857,
						"acc_norm,none": 0.4857142857142857,
						"acc_norm_stderr,none": 0.04900901784830281,
						"acc_stderr,none": 0.04900901784830281,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.32571428571428573,
						"acc_norm,none": 0.32571428571428573,
						"acc_norm_stderr,none": 0.03552759084811122,
						"acc_stderr,none": 0.03552759084811122,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3080568720379147,
						"acc_norm,none": 0.3080568720379147,
						"acc_norm_stderr,none": 0.031859650225872053,
						"acc_stderr,none": 0.031859650225872053,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281294,
						"acc_stderr,none": 0.022199827758281294,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3706896551724138,
						"acc_norm,none": 0.3706896551724138,
						"acc_norm_stderr,none": 0.03177837449226177,
						"acc_stderr,none": 0.03177837449226177,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.39655172413793105,
						"acc_norm,none": 0.39655172413793105,
						"acc_norm_stderr,none": 0.03719177407817322,
						"acc_stderr,none": 0.03719177407817322,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.04024778401977108,
						"acc_stderr,none": 0.04024778401977108,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3584070796460177,
						"acc_norm,none": 0.3584070796460177,
						"acc_norm_stderr,none": 0.03196883516493523,
						"acc_stderr,none": 0.03196883516493523,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.037563357751878974,
						"acc_stderr,none": 0.037563357751878974,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.33727810650887574,
						"acc_norm,none": 0.33727810650887574,
						"acc_norm_stderr,none": 0.03647582250277504,
						"acc_stderr,none": 0.03647582250277504,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.37888198757763975,
						"acc_norm,none": 0.37888198757763975,
						"acc_norm_stderr,none": 0.03835120818393935,
						"acc_stderr,none": 0.03835120818393935,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.3625,
						"acc_norm_stderr,none": 0.03812374340644889,
						"acc_stderr,none": 0.03812374340644889,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.042422809545074935,
						"mcc_stderr,none": 0.030999041061600608
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896308,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.63006169439144,
						"likelihood_diff_stderr,none": 0.5744039717270023,
						"pct_stereotype,none": 0.5709600477042337,
						"pct_stereotype_stderr,none": 0.08607765193290201
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.365720936565365,
						"likelihood_diff_stderr,none": 0.09866325722047321,
						"pct_stereotype,none": 0.6374478234943352,
						"pct_stereotype_stderr,none": 0.011742770482379051
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 5.030007729163537,
						"likelihood_diff_stderr,none": 0.4509543244913358,
						"pct_stereotype,none": 0.7142857142857143,
						"pct_stereotype_stderr,none": 0.04761904761904759
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.779386000199751,
						"likelihood_diff_stderr,none": 2.5476536740538234,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726127
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.730907322810246,
						"likelihood_diff_stderr,none": 0.6802927921748851,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.485739195346832,
						"likelihood_diff_stderr,none": 0.19289373350613756,
						"pct_stereotype,none": 0.615625,
						"pct_stereotype_stderr,none": 0.027235813331371504
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.000834102983828,
						"likelihood_diff_stderr,none": 0.25361732551950644,
						"pct_stereotype,none": 0.6018518518518519,
						"pct_stereotype_stderr,none": 0.03338473403207401
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.486137019263373,
						"likelihood_diff_stderr,none": 0.4577343807841529,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.0812082440834345,
						"likelihood_diff_stderr,none": 0.16982657816986538,
						"pct_stereotype,none": 0.5334645669291339,
						"pct_stereotype_stderr,none": 0.022155988267174086
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.712209735904728,
						"likelihood_diff_stderr,none": 0.364937243089491,
						"pct_stereotype,none": 0.7837837837837838,
						"pct_stereotype_stderr,none": 0.03925056618715645
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.998516328873173,
						"likelihood_diff_stderr,none": 0.5393981227782015,
						"pct_stereotype,none": 0.7956989247311828,
						"pct_stereotype_stderr,none": 0.04203545939892302
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 5.198481178283691,
						"likelihood_diff_stderr,none": 0.2735153994361066,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.033333333333333354
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.894402452217516,
						"likelihood_diff_stderr,none": 0.11138643493786074,
						"pct_stereotype,none": 0.5044722719141324,
						"pct_stereotype_stderr,none": 0.012212810647205388
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.257103559705946,
						"likelihood_diff_stderr,none": 0.4200438016008984,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.275528540978065,
						"likelihood_diff_stderr,none": 0.485082604774444,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 7.101538918235085,
						"likelihood_diff_stderr,none": 0.6628978509193643,
						"pct_stereotype,none": 0.5909090909090909,
						"pct_stereotype_stderr,none": 0.06098367211363066
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 4.399171062719042,
						"likelihood_diff_stderr,none": 0.21656829678938225,
						"pct_stereotype,none": 0.514018691588785,
						"pct_stereotype_stderr,none": 0.027939861549302364
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 5.106987376458089,
						"likelihood_diff_stderr,none": 0.3190291060781744,
						"pct_stereotype,none": 0.42292490118577075,
						"pct_stereotype_stderr,none": 0.031120568731718614
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.131572617424859,
						"likelihood_diff_stderr,none": 0.5093656702002783,
						"pct_stereotype,none": 0.5833333333333334,
						"pct_stereotype_stderr,none": 0.05850912479161746
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.876758824224058,
						"likelihood_diff_stderr,none": 0.21395096893563906,
						"pct_stereotype,none": 0.358695652173913,
						"pct_stereotype_stderr,none": 0.022386634341410947
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.63730700948964,
						"likelihood_diff_stderr,none": 0.4559048906112993,
						"pct_stereotype,none": 0.6086956521739131,
						"pct_stereotype_stderr,none": 0.04570934635111713
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 5.485962291340251,
						"likelihood_diff_stderr,none": 0.4088799005890507,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.043649726328985346
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.35200649378251,
						"likelihood_diff_stderr,none": 0.342533476841723,
						"pct_stereotype,none": 0.6683673469387755,
						"pct_stereotype_stderr,none": 0.03371467279183506
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03543307086614173,
						"exact_match_stderr,none": 0.0041021885546655475
					},
					"glue": {
						"acc,none": 0.4895337065269176,
						"acc_stderr,none": 0.00096090966575593,
						"alias": "glue",
						"f1,none": 0.4838809799217942,
						"f1_stderr,none": 9.500991312938969e-05,
						"mcc,none": 0.042422809545074935,
						"mcc_stderr,none": 0.030999041061600608
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.21531463229719486,
						"exact_match_stderr,get-answer": 0.011322096294579668
					},
					"hellaswag": {
						"acc,none": 0.5779725154351723,
						"acc_norm,none": 0.7540330611431986,
						"acc_norm_stderr,none": 0.004297788888297727,
						"acc_stderr,none": 0.004928735103635843,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2778226970834537,
						"acc_norm,none": 0.2778226970834537,
						"acc_norm_stderr,none": 0.02874125338165857,
						"acc_stderr,none": 0.02874125338165857,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.267,
						"acc_norm,none": 0.267,
						"acc_norm_stderr,none": 0.013996674851796271,
						"acc_stderr,none": 0.013996674851796271,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306263,
						"acc_stderr,none": 0.014158794845306263,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950436,
						"acc_stderr,none": 0.014553205687950436,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.01780880651013786,
						"acc_stderr,none": 0.01780880651013786,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259588,
						"acc_stderr,none": 0.014111099288259588,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.014965960710224479,
						"acc_stderr,none": 0.014965960710224479,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459546,
						"acc_stderr,none": 0.014526080235459546,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.02960162633044061,
						"acc_stderr,none": 0.02960162633044061,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296184,
						"acc_stderr,none": 0.014341711358296184,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03709560170541631,
						"acc_stderr,none": 0.03709560170541631,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259717,
						"acc_stderr,none": 0.013929286594259717,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.014770821817934645,
						"acc_stderr,none": 0.014770821817934645,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.283,
						"acc_norm,none": 0.283,
						"acc_norm_stderr,none": 0.014251810906481747,
						"acc_stderr,none": 0.014251810906481747,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965126,
						"acc_stderr,none": 0.013895037677965126,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485258,
						"acc_stderr,none": 0.014174516461485258,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.255,
						"acc_norm,none": 0.255,
						"acc_norm_stderr,none": 0.013790038620872826,
						"acc_stderr,none": 0.013790038620872826,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543148,
						"acc_stderr,none": 0.014512395033543148,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.281,
						"acc_norm,none": 0.281,
						"acc_norm_stderr,none": 0.01422115470843494,
						"acc_stderr,none": 0.01422115470843494,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.014853842487270329,
						"acc_stderr,none": 0.014853842487270329,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.014385511563477343,
						"acc_stderr,none": 0.014385511563477343,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.014046255632633918,
						"acc_stderr,none": 0.014046255632633918,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.263,
						"acc_norm,none": 0.263,
						"acc_norm_stderr,none": 0.013929286594259736,
						"acc_stderr,none": 0.013929286594259736,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.01382541652689502,
						"acc_stderr,none": 0.01382541652689502,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2866666666666667,
						"acc_norm,none": 0.2866666666666667,
						"acc_norm_stderr,none": 0.0184765740275212,
						"acc_stderr,none": 0.0184765740275212,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091501,
						"acc_stderr,none": 0.014205696104091501,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259588,
						"acc_stderr,none": 0.014111099288259588,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555953,
						"acc_stderr,none": 0.013550631705555953,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.025041771123531665,
						"acc_stderr,none": 0.025041771123531665,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.01362606581775065,
						"acc_stderr,none": 0.01362606581775065,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462618,
						"acc_stderr,none": 0.014078856992462618,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.029122423970017443,
						"acc_stderr,none": 0.029122423970017443,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.01360735683959812,
						"acc_stderr,none": 0.01360735683959812,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.266,
						"acc_norm,none": 0.266,
						"acc_norm_stderr,none": 0.013979965645145151,
						"acc_stderr,none": 0.013979965645145151,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.030056479497755487,
						"acc_stderr,none": 0.030056479497755487,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.356,
						"acc_norm,none": 0.356,
						"acc_norm_stderr,none": 0.015149042659306628,
						"acc_stderr,none": 0.015149042659306628,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5051523788642841,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.000499847695390778,
						"acc_stderr,none": 0.032855130670812395,
						"alias": "kobest",
						"f1,none": 0.4133038274651914,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.013339608823275216,
						"alias": " - kobest_boolq",
						"f1,none": 0.3698233574044614,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.555,
						"acc_stderr,none": 0.01572330188676094,
						"alias": " - kobest_copa",
						"f1,none": 0.5546254399950358,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.414,
						"acc_norm,none": 0.476,
						"acc_norm_stderr,none": 0.0223572738810164,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.41021748901354416,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5012594458438288,
						"acc_stderr,none": 0.025125865671612197,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4857243797759866,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.685231903745391,
						"acc_stderr,none": 0.012811523002124903,
						"alias": "lambada",
						"perplexity,none": 3.6181896161210485,
						"perplexity_stderr,none": 0.20070597826065645
					},
					"lambada_cloze": {
						"acc,none": 0.26964874830196,
						"acc_stderr,none": 0.023571225973558887,
						"alias": "lambada_cloze",
						"perplexity,none": 59.16488358266109,
						"perplexity_stderr,none": 17.205019334805293
					},
					"lambada_multilingual": {
						"acc,none": 0.4823986027556763,
						"acc_stderr,none": 0.09072647847465312,
						"alias": "lambada_multilingual",
						"perplexity,none": 57.042172854076604,
						"perplexity_stderr,none": 26.965652387645022
					},
					"lambada_openai": {
						"acc,none": 0.7073549388705609,
						"acc_stderr,none": 0.006338717071166962,
						"alias": " - lambada_openai",
						"perplexity,none": 3.2669694074727325,
						"perplexity_stderr,none": 0.08670981010512012
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.31515621967785756,
						"acc_stderr,none": 0.006472480817588203,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 25.06730682244588,
						"perplexity_stderr,none": 0.7600306226065451
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3811372016301184,
						"acc_stderr,none": 0.006766279471108957,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 107.88641822332492,
						"perplexity_stderr,none": 8.346927407066568
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7073549388705609,
						"acc_stderr,none": 0.006338717071166964,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.26738661283498,
						"perplexity_stderr,none": 0.08670177584512109
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3782262759557539,
						"acc_stderr,none": 0.006756224989789186,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 70.10198485582532,
						"perplexity_stderr,none": 4.9188446355598225
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.492140500679216,
						"acc_stderr,none": 0.006965117003048606,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 39.30187309975079,
						"perplexity_stderr,none": 2.704778816496143
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4531340966427324,
						"acc_stderr,none": 0.006935309823023549,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 64.65320147864703,
						"perplexity_stderr,none": 4.8964710849417665
					},
					"lambada_standard": {
						"acc,none": 0.6631088686202212,
						"acc_stderr,none": 0.006584901457755902,
						"alias": " - lambada_standard",
						"perplexity,none": 3.969017687295701,
						"perplexity_stderr,none": 0.10726017761441657
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.22414127692606248,
						"acc_stderr,none": 0.005809841940554694,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 93.26246034287631,
						"perplexity_stderr,none": 3.1733239960107937
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3651399491094148,
						"exact_match_stderr,get-answer": 0.01214732308367413
					},
					"logiqa": {
						"acc,none": 0.2626728110599078,
						"acc_norm,none": 0.3102918586789555,
						"acc_norm_stderr,none": 0.01814517613864157,
						"acc_stderr,none": 0.017261598347857544,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.3053435114503817,
						"acc_norm,none": 0.3110687022900763,
						"acc_norm_stderr,none": 0.011679601458370995,
						"acc_stderr,none": 0.011619603364400124,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2877721943048576,
						"acc_norm,none": 0.29447236180904524,
						"acc_norm_stderr,none": 0.00834410722096108,
						"acc_stderr,none": 0.008287708494779906,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6147002753653887,
						"acc_stderr,none": 0.005008665796520436,
						"alias": "mc_taco",
						"f1,none": 0.56378896882494,
						"f1_stderr,none": 0.006488581243200617
					},
					"medmcqa": {
						"acc,none": 0.3643318192684676,
						"acc_norm,none": 0.3643318192684676,
						"acc_norm_stderr,none": 0.007441693406081491,
						"acc_stderr,none": 0.007441693406081491,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3857030636292223,
						"acc_norm,none": 0.3857030636292223,
						"acc_norm_stderr,none": 0.013648098974225571,
						"acc_stderr,none": 0.013648098974225571,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.46246973365617433,
						"acc_stderr,none": 0.11718152391648695,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.45925925925925926,
						"acc_stderr,none": 0.04304979692464243,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4868421052631579,
						"acc_stderr,none": 0.04067533136309173,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.5509433962264151,
						"acc_stderr,none": 0.030612730713641092,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4513888888888889,
						"acc_stderr,none": 0.04161402398403279,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3815028901734104,
						"acc_stderr,none": 0.037038511930995194,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.19607843137254902,
						"acc_stderr,none": 0.03950581861179963,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.39574468085106385,
						"acc_stderr,none": 0.03196758697835362,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.04372748290278007,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4482758620689655,
						"acc_stderr,none": 0.04144311810878151,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2724867724867725,
						"acc_stderr,none": 0.02293097307163335,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604675,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4870967741935484,
						"acc_stderr,none": 0.028434533152681848,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3448275862068966,
						"acc_stderr,none": 0.03344283744280458,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5757575757575758,
						"acc_stderr,none": 0.03859268142070262,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.5909090909090909,
						"acc_stderr,none": 0.035029757994130065,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.6787564766839378,
						"acc_stderr,none": 0.033699508685490674,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4076923076923077,
						"acc_stderr,none": 0.024915243985987847,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.027195934804085626,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.031124619309328177,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2781456953642384,
						"acc_stderr,none": 0.036586032627637426,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.6238532110091743,
						"acc_stderr,none": 0.020769231968205074,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2638888888888889,
						"acc_stderr,none": 0.03005820270430985,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.6617647058823529,
						"acc_stderr,none": 0.03320574612945431,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.6160337552742616,
						"acc_stderr,none": 0.031658678064106674,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.5874439461883408,
						"acc_stderr,none": 0.03304062175449297,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5572519083969466,
						"acc_stderr,none": 0.04356447202665069,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.4308182784272051,
						"acc_stderr,none": 0.11623107823695161,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6033057851239669,
						"acc_stderr,none": 0.04465869780531009,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.04803752235190192,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.5644171779141104,
						"acc_stderr,none": 0.038956324641389366,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04547960999764376,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6796116504854369,
						"acc_stderr,none": 0.04620284082280042,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.7564102564102564,
						"acc_stderr,none": 0.0281209665039144,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6883780332056194,
						"acc_stderr,none": 0.016562433867284176,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.5057803468208093,
						"acc_stderr,none": 0.026917296179149123,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217887,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4869281045751634,
						"acc_stderr,none": 0.028620130800700246,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.5490827164467331,
						"acc_stderr,none": 0.09996282778644679,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5273311897106109,
						"acc_stderr,none": 0.028355633568328188,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5401234567901234,
						"acc_stderr,none": 0.02773102275353928,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3723404255319149,
						"acc_stderr,none": 0.028838921471251455,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.35528031290743156,
						"acc_stderr,none": 0.012223623364044036,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.02989616303312547,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.47058823529411764,
						"acc_stderr,none": 0.02019280827143379,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.5181818181818182,
						"acc_stderr,none": 0.04785964010794916,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.5020408163265306,
						"acc_stderr,none": 0.0320089533497105,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.5297367565810854,
						"acc_stderr,none": 0.09854693982666178,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7412935323383084,
						"acc_stderr,none": 0.030965903123573026,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3587059942911513,
						"acc_stderr,none": 0.09468851310930955,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.46987951807228917,
						"acc_stderr,none": 0.03885425420866767,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.6900584795321637,
						"acc_stderr,none": 0.035469769593931624,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4981151299032094,
						"acc_stderr,none": 0.005047123033319277,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5231895850284785,
						"acc_stderr,none": 0.005037366660989182,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5490196078431373,
						"acc_stderr,none": 0.024664683843663437,
						"alias": "mrpc",
						"f1,none": 0.5818181818181818,
						"f1_stderr,none": 0.027936495437643143
					},
					"multimedqa": {
						"acc,none": 0.408374733853797,
						"acc_norm,none": 0.371433252298277,
						"acc_norm_stderr,none": 0.00011753217044080902,
						"acc_stderr,none": 0.08811855406751401,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.3386963696369637,
						"acc_stderr,none": 0.006797813014250004,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7176448457486831,
						"mrr_stderr,none": 0.010292095285974205,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.4040632054176072,
						"r@2_stderr,none": 0.01649503028890606
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6561324303987951,
						"mrr_stderr,none": 0.010440497978715917,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.4525959367945824,
						"r@2_stderr,none": 0.0167316086667748
					},
					"openbookqa": {
						"acc,none": 0.334,
						"acc_norm,none": 0.436,
						"acc_norm_stderr,none": 0.0221989546414768,
						"acc_stderr,none": 0.021113492347743734,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3875,
						"acc_stderr,none": 0.010896386585483742,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.419,
						"acc_stderr,none": 0.011035415270622925,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3865,
						"acc_stderr,none": 0.010891197550868481,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.406,
						"acc_stderr,none": 0.010983729838291724,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5045,
						"acc_stderr,none": 0.011182683094883898,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.454,
						"acc_stderr,none": 0.011135708419359803,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.44192857142857145,
						"acc_stderr,none": 0.04262578926290293,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.763873775843308,
						"acc_norm,none": 0.7709466811751904,
						"acc_norm_stderr,none": 0.009804509865175504,
						"acc_stderr,none": 0.00990896589055821,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2661720751494449,
						"acc_norm,none": 0.2917912040990606,
						"acc_norm_stderr,none": 0.0033211608327903394,
						"acc_stderr,none": 0.0032288770908291973,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.01978055967565545,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7365426878291983,
						"acc_norm,none": 0.6178353531205103,
						"acc_norm_stderr,none": 0.008181587220774588,
						"acc_stderr,none": 0.14193765970017502,
						"alias": "pythia",
						"bits_per_byte,none": 0.6606345727221108,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5807777795639693,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.2669694074727325,
						"perplexity_stderr,none": 0.08670981010512012,
						"word_perplexity,none": 11.573158664725804,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.50177304964539,
						"acc_norm,none": 0.5283687943262412,
						"acc_norm_stderr,none": 0.06916068366031697,
						"acc_stderr,none": 0.05671904085534381,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.6083333333333333,
						"acc_norm,none": 0.6666666666666666,
						"acc_norm_stderr,none": 0.04321358157014425,
						"acc_stderr,none": 0.0447461456852782,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.46875,
						"acc_norm,none": 0.53125,
						"acc_norm_stderr,none": 0.039575057062617526,
						"acc_stderr,none": 0.039575057062617526,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4753521126760563,
						"acc_norm,none": 0.46830985915492956,
						"acc_norm_stderr,none": 0.029662157481845537,
						"acc_stderr,none": 0.029685779730361204,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5791689547867472,
						"acc_stderr,none": 0.006680064788607797,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4566905763047242,
						"acc_stderr,none": 0.002477354428465408,
						"alias": "qqp",
						"f1,none": 0.48305563400169443,
						"f1_stderr,none": 0.0029906897089828893
					},
					"race": {
						"acc,none": 0.43732057416267944,
						"acc_stderr,none": 0.015352539494924057,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.6967509025270758,
						"acc_stderr,none": 0.027668396293593706,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.94,
						"acc_norm,none": 0.878,
						"acc_norm_stderr,none": 0.010354864712936703,
						"acc_stderr,none": 0.007513751157474925,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6967509025270758,
						"acc_stderr,none": 0.027668396293593706,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8704128440366973,
						"acc_stderr,none": 0.011379797847506316,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5748775367389783,
						"acc_norm,none": 0.7543736878936319,
						"acc_norm_stderr,none": 0.0030434167885667893,
						"acc_stderr,none": 0.003495227256016185,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7185118631659512,
						"acc_stderr,none": 0.07014856545878871,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.7291666666666666,
						"acc_stderr,none": 0.004447684016110993,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8841593189419277,
						"acc_stderr,none": 0.003221997950910306,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.547843137254902,
						"acc_stderr,none": 0.0049282630475762455,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.37722048975258216,
						"acc_stderr,none": 0.0017006425139196304,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.4467564259485924,
						"bleu_acc_stderr,none": 0.017403977522557144,
						"bleu_diff,none": -1.7707450061348697,
						"bleu_diff_stderr,none": 0.6074797313293264,
						"bleu_max,none": 20.270731760309094,
						"bleu_max_stderr,none": 0.696522073489329,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.01736923616440443,
						"rouge1_diff,none": -1.9634603403693753,
						"rouge1_diff_stderr,none": 0.7429761242693544,
						"rouge1_max,none": 44.88360003234336,
						"rouge1_max_stderr,none": 0.804401768251018,
						"rouge2_acc,none": 0.37821297429620565,
						"rouge2_acc_stderr,none": 0.01697633590754687,
						"rouge2_diff,none": -3.3910272731866984,
						"rouge2_diff_stderr,none": 0.8676854434022686,
						"rouge2_max,none": 29.83853821263299,
						"rouge2_max_stderr,none": 0.8856794698145992,
						"rougeL_acc,none": 0.4357405140758874,
						"rougeL_acc_stderr,none": 0.01735834539886313,
						"rougeL_diff,none": -2.3291268007159838,
						"rougeL_diff_stderr,none": 0.7395397657573707,
						"rougeL_max,none": 41.62103860651703,
						"rougeL_max_stderr,none": 0.8052149731246078
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.4467564259485924,
						"bleu_acc_stderr,none": 0.017403977522557144,
						"bleu_diff,none": -1.7707450061348697,
						"bleu_diff_stderr,none": 0.6074797313293264,
						"bleu_max,none": 20.270731760309094,
						"bleu_max_stderr,none": 0.696522073489329,
						"rouge1_acc,none": 0.43818849449204406,
						"rouge1_acc_stderr,none": 0.01736923616440443,
						"rouge1_diff,none": -1.9634603403693753,
						"rouge1_diff_stderr,none": 0.7429761242693544,
						"rouge1_max,none": 44.88360003234336,
						"rouge1_max_stderr,none": 0.804401768251018,
						"rouge2_acc,none": 0.37821297429620565,
						"rouge2_acc_stderr,none": 0.01697633590754687,
						"rouge2_diff,none": -3.3910272731866984,
						"rouge2_diff_stderr,none": 0.8676854434022686,
						"rouge2_max,none": 29.83853821263299,
						"rouge2_max_stderr,none": 0.8856794698145992,
						"rougeL_acc,none": 0.4357405140758874,
						"rougeL_acc_stderr,none": 0.01735834539886313,
						"rougeL_diff,none": -2.3291268007159838,
						"rougeL_diff_stderr,none": 0.7395397657573707,
						"rougeL_max,none": 41.62103860651703,
						"rougeL_max_stderr,none": 0.8052149731246078
					},
					"truthfulqa_mc1": {
						"acc,none": 0.3011015911872705,
						"acc_stderr,none": 0.016058999026100598,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4533393883178938,
						"acc_stderr,none": 0.015648771236558588,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.03543307086614173,
						"exact_match_stderr,none": 0.0041021885546655475
					},
					"wic": {
						"acc,none": 0.5611285266457681,
						"acc_stderr,none": 0.01966211057333337,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6606345727221108,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5807777795639693,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.573158664725804,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6621941594317285,
						"acc_stderr,none": 0.013292583502910892,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.6338028169014085,
						"acc_stderr,none": 0.057581843143880006,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6634615384615384,
						"acc_stderr,none": 0.0465593186155004,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8498168498168498,
						"acc_stderr,none": 0.021661514699106627,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5598181818181818,
						"acc_stderr,none": 0.054954054116450136,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.022357273881016403,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.021683827539286122,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.021081766571222856,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.022378596989230785,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.616,
						"acc_stderr,none": 0.021772369465547194,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.612,
						"acc_stderr,none": 0.021814300984787635,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.389718875502008,
						"acc_stderr,none": 0.04489098206900832,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3421686746987952,
						"acc_stderr,none": 0.009509659143015627,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.40080321285140563,
						"acc_stderr,none": 0.009822858473047378,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.42971887550200805,
						"acc_stderr,none": 0.009922572153607784,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3755020080321285,
						"acc_stderr,none": 0.009706422844379822,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5132530120481927,
						"acc_stderr,none": 0.010018551648218469,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750335,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4534136546184739,
						"acc_stderr,none": 0.009978476483838967,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.009593947957927137,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43453815261044176,
						"acc_stderr,none": 0.009935807354856824,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.009512333319470377,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512703,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3646586345381526,
						"acc_stderr,none": 0.009647934990250467,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956477,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.38433734939759034,
						"acc_stderr,none": 0.009750238765722523,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3646586345381526,
						"acc_stderr,none": 0.009647934990250467,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.58552433668251,
						"acc_stderr,none": 0.07646648104203008,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5023163467902052,
						"acc_stderr,none": 0.012866987239478041,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7895433487756452,
						"acc_stderr,none": 0.010490129361754561,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6704169424222369,
						"acc_stderr,none": 0.012096687350589679,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5122435473196558,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245273,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6022501654533422,
						"acc_stderr,none": 0.01259519785670352,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.012864417047980475,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6512243547319656,
						"acc_stderr,none": 0.012264502012981198,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5208471211118465,
						"acc_stderr,none": 0.012855936282881269,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5367306419589676,
						"acc_stderr,none": 0.012832359240206969,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6234281932495036,
						"acc_stderr,none": 0.01246891448965936,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.76219375140481,
						"acc_stderr,none": 0.051925008888302905,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8567741935483871,
						"acc_stderr,none": 0.007266508145410969,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.0506639425494172,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6871741397288843,
						"acc_stderr,none": 0.01497966058106645,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6539923954372624,
						"acc_stderr,none": 0.02938857480054503,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6476190476190476,
						"acc_stderr,none": 0.02695883963250933,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6071428571428571,
						"acc_stderr,none": 0.02177603367171575,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "meta-llama/Llama-2-7b-chat-hf"
	},
	"meta-llama/Llama-2-7b-hf": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6541713641488163,
						"acc_norm,none": 0.6510710259301015,
						"acc_norm_stderr,none": 0.09021379397510795,
						"acc_stderr,none": 0.10560147953049626,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3696875,
						"acc_stderr,none": 0.015653487597805493,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.4703,
						"acc_stderr,none": 0.20787652027961592,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8187910447761194,
						"acc_stderr,none": 0.15154488026568294,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29494799405646366,
						"acc_norm,none": 0.29494799405646366,
						"acc_norm_stderr,none": 0.1253341487220105,
						"acc_stderr,none": 0.1253341487220105,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.27490934208254203,
						"acc_norm,none": 0.27490934208254203,
						"acc_norm_stderr,none": 0.04653107474437087,
						"acc_stderr,none": 0.04653107474437087,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7763921911686604,
						"likelihood_diff_stderr,none": 0.4963909194332539,
						"pct_stereotype,none": 0.6049493142516398,
						"pct_stereotype_stderr,none": 0.07609615782995353
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05708661417322835,
						"exact_match_stderr,none": 0.0051481131263720215
					},
					"glue": {
						"acc,none": 0.4288649356836589,
						"acc_stderr,none": 0.0009025765607836904,
						"alias": "glue",
						"f1,none": 0.4093019840655304,
						"f1_stderr,none": 0.0014228569151280678,
						"mcc,none": -0.02808452109965501,
						"mcc_stderr,none": 0.028506340824708255
					},
					"kmmlu": {
						"acc,none": 0.2676003465203581,
						"acc_norm,none": 0.2676003465203581,
						"acc_norm_stderr,none": 0.034163260177736014,
						"acc_stderr,none": 0.034163260177736014,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5071256303442228,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.0004986853707414854,
						"acc_stderr,none": 0.03303345296030365,
						"alias": "kobest",
						"f1,none": 0.41979664442214615,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7085193091403066,
						"acc_stderr,none": 0.015247220853450627,
						"alias": "lambada",
						"perplexity,none": 3.7658176110470616,
						"perplexity_stderr,none": 0.1991207190229144
					},
					"lambada_cloze": {
						"acc,none": 0.0785949932078401,
						"acc_stderr,none": 0.0037502397922610326,
						"alias": "lambada_cloze",
						"perplexity,none": 120.91677248447283,
						"perplexity_stderr,none": 26.305404561724345
					},
					"lambada_multilingual": {
						"acc,none": 0.5075490005821851,
						"acc_stderr,none": 0.06904520559738755,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.386892992110276,
						"perplexity_stderr,none": 9.196737138000033
					},
					"mmlu": {
						"acc,none": 0.4077054550633813,
						"acc_stderr,none": 0.09187048511250485,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.38746014877789586,
						"acc_stderr,none": 0.1014882719217198,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4560669456066946,
						"acc_stderr,none": 0.06959799533941279,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4647383815404615,
						"acc_stderr,none": 0.07958262286944816,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.33460196638122425,
						"acc_stderr,none": 0.07292341967401174,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3777146912704045,
						"acc_norm,none": 0.3366191038294915,
						"acc_norm_stderr,none": 0.00011728954031513654,
						"acc_stderr,none": 0.08904389567338657,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.41214285714285714,
						"acc_stderr,none": 0.05298357400023882,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7414992937390027,
						"acc_norm,none": 0.6543042593572382,
						"acc_norm_stderr,none": 0.009797090522454048,
						"acc_stderr,none": 0.14522400447304726,
						"alias": "pythia",
						"bits_per_byte,none": 0.5864058660635897,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5015014478739381,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.398195708351395,
						"perplexity_stderr,none": 0.06700599116380335,
						"word_perplexity,none": 8.78947151686651,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.44680851063829785,
						"acc_norm,none": 0.5070921985815603,
						"acc_norm_stderr,none": 0.07515845281612854,
						"acc_stderr,none": 0.044546290246168116,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7656650361052877,
						"acc_stderr,none": 0.04167851001411332,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3204850251853695,
						"acc_stderr,none": 0.0014178002093164159,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.35006119951040393,
						"bleu_acc_stderr,none": 0.01669794942015103,
						"bleu_diff,none": -5.3500854414780425,
						"bleu_diff_stderr,none": 0.9804146767920648,
						"bleu_max,none": 31.00293500319662,
						"bleu_max_stderr,none": 0.8279911546400845,
						"rouge1_acc,none": 0.3353733170134639,
						"rouge1_acc_stderr,none": 0.01652753403966899,
						"rouge1_diff,none": -6.323106350558213,
						"rouge1_diff_stderr,none": 1.0926663608439762,
						"rouge1_max,none": 56.61671906198531,
						"rouge1_max_stderr,none": 0.8594703426587661,
						"rouge2_acc,none": 0.31211750305997554,
						"rouge2_acc_stderr,none": 0.016220756769520957,
						"rouge2_diff,none": -7.389501125042459,
						"rouge2_diff_stderr,none": 1.2810213690404875,
						"rouge2_max,none": 42.423677919851784,
						"rouge2_max_stderr,none": 1.0236591235927208,
						"rougeL_acc,none": 0.33659730722154224,
						"rougeL_acc_stderr,none": 0.0165424128094949,
						"rougeL_diff,none": -6.452481151621109,
						"rougeL_diff_stderr,none": 1.1016746550924312,
						"rougeL_max,none": 53.778756886336346,
						"rougeL_max_stderr,none": 0.8848956056120001
					},
					"xcopa": {
						"acc,none": 0.5667272727272729,
						"acc_stderr,none": 0.05251474454012673,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.39903614457831327,
						"acc_stderr,none": 0.05197241860273406,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5751158173395101,
						"acc_stderr,none": 0.07472708356841419,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7945605754102046,
						"acc_stderr,none": 0.04881325187450866,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6541713641488163,
						"acc_norm,none": 0.6510710259301015,
						"acc_norm_stderr,none": 0.09021379397510795,
						"acc_stderr,none": 0.10560147953049626,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3696875,
						"acc_stderr,none": 0.015653487597805493,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798597,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.371,
						"acc_stderr,none": 0.015283736211823187,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3775,
						"acc_stderr,none": 0.01399969468271862,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4308873720136519,
						"acc_norm,none": 0.46075085324232085,
						"acc_norm_stderr,none": 0.014566303676636581,
						"acc_stderr,none": 0.014471133392642473,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7643097643097643,
						"acc_norm,none": 0.7449494949494949,
						"acc_norm_stderr,none": 0.00894426590613072,
						"acc_stderr,none": 0.008709108323214466,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.4703,
						"acc_stderr,none": 0.20787652027961592,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.1835,
						"acc_stderr,none": 0.008657444812144987,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.007320163413216707,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.159,
						"acc_stderr,none": 0.008178810822683118,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.01118293472280455,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.008008218639803045,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.011099599116647334,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.6745,
						"acc_stderr,none": 0.010479970891894057,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.3625,
						"acc_stderr,none": 0.010751961557718986,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.4085,
						"acc_stderr,none": 0.010994285431808398,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.246,
						"acc_stderr,none": 0.009632673263279505,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.014316702819956615,
						"acc_stderr,none": 0.00247485048302597,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8187910447761194,
						"acc_stderr,none": 0.15154488026568294,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524277,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333368,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689079,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.01306317904059528,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249935,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888917,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.575,
						"acc_stderr,none": 0.015640320317040105,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.01364467578131413,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352803,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142653,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910644,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897896,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832017,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697067,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787731,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491103,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099189,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816322,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967223,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.013493000446937591,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366651,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427423,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477825,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298232,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.01467127282297789,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996686,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425667,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795021,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427422,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753655,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942324,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904628,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095526,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992443,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.662,
						"acc_stderr,none": 0.01496596071022449,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.603,
						"acc_stderr,none": 0.015480007449307989,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535245,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785129,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345693,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.01316983084342566,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298627,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304417,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950436,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639237,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785136,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448795,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.757,
						"acc_stderr,none": 0.013569640199177429,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.479,
						"acc_stderr,none": 0.015805341148131296,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766273,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315158,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.01556091713692166,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103327,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992433,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796575,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541674,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.008125578442487909,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078166,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452372,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.273,
						"acc_stderr,none": 0.014095022868717598,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.231,
						"acc_stderr,none": 0.013334797216936435,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7779816513761468,
						"acc_stderr,none": 0.0072689494869396,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.44642857142857145,
						"acc_stderr,none": 0.06703189227942398,
						"alias": "cb",
						"f1,none": 0.37222222222222223,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29494799405646366,
						"acc_norm,none": 0.29494799405646366,
						"acc_norm_stderr,none": 0.1253341487220105,
						"acc_stderr,none": 0.1253341487220105,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.15151515151515152,
						"acc_norm,none": 0.15151515151515152,
						"acc_norm_stderr,none": 0.06338333534349055,
						"acc_stderr,none": 0.06338333534349055,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828826,
						"acc_stderr,none": 0.062426763436828826,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.34545454545454546,
						"acc_norm,none": 0.34545454545454546,
						"acc_norm_stderr,none": 0.06470956516382614,
						"acc_stderr,none": 0.06470956516382614,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.0795654132101608,
						"acc_stderr,none": 0.0795654132101608,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595455,
						"acc_stderr,none": 0.08534681648595455,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.1486470975026408,
						"acc_stderr,none": 0.1486470975026408,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.4782608695652174,
						"acc_norm,none": 0.4782608695652174,
						"acc_norm_stderr,none": 0.10649955403405122,
						"acc_stderr,none": 0.10649955403405122,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.053348255582850765,
						"acc_stderr,none": 0.053348255582850765,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453991,
						"acc_stderr,none": 0.06957698714453991,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.43478260869565216,
						"acc_norm,none": 0.43478260869565216,
						"acc_norm_stderr,none": 0.07389883353033021,
						"acc_stderr,none": 0.07389883353033021,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.27490934208254203,
						"acc_norm,none": 0.27490934208254203,
						"acc_norm_stderr,none": 0.04653107474437087,
						"acc_stderr,none": 0.04653107474437087,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2958579881656805,
						"acc_norm,none": 0.2958579881656805,
						"acc_norm_stderr,none": 0.035214144124964784,
						"acc_stderr,none": 0.035214144124964784,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.291866028708134,
						"acc_norm,none": 0.291866028708134,
						"acc_norm_stderr,none": 0.03152229446041968,
						"acc_stderr,none": 0.03152229446041968,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.31297709923664124,
						"acc_norm,none": 0.31297709923664124,
						"acc_norm_stderr,none": 0.04066962905677697,
						"acc_stderr,none": 0.04066962905677697,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.03762607496624008,
						"acc_stderr,none": 0.03762607496624008,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.043628399335701,
						"acc_stderr,none": 0.043628399335701,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.024254090252458033,
						"acc_stderr,none": 0.024254090252458033,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923403,
						"acc_stderr,none": 0.030964517926923403,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.29608938547486036,
						"acc_norm,none": 0.29608938547486036,
						"acc_norm_stderr,none": 0.0342184375430487,
						"acc_stderr,none": 0.0342184375430487,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.028304657943035286,
						"acc_stderr,none": 0.028304657943035286,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.32407407407407407,
						"acc_norm,none": 0.32407407407407407,
						"acc_norm_stderr,none": 0.04524596007030048,
						"acc_stderr,none": 0.04524596007030048,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.3238095238095238,
						"acc_norm,none": 0.3238095238095238,
						"acc_norm_stderr,none": 0.04588414718067473,
						"acc_stderr,none": 0.04588414718067473,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.04302548773959011,
						"acc_stderr,none": 0.04302548773959011,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.025546583236733544,
						"acc_stderr,none": 0.025546583236733544,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832904,
						"acc_stderr,none": 0.031321798030832904,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.03401052620104088,
						"acc_stderr,none": 0.03401052620104088,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.03652215878407507,
						"acc_stderr,none": 0.03652215878407507,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3067484662576687,
						"acc_norm,none": 0.3067484662576687,
						"acc_norm_stderr,none": 0.03623089915724148,
						"acc_stderr,none": 0.03623089915724148,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23412698412698413,
						"acc_norm,none": 0.23412698412698413,
						"acc_norm_stderr,none": 0.026728048999302402,
						"acc_stderr,none": 0.026728048999302402,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.3434343434343434,
						"acc_norm,none": 0.3434343434343434,
						"acc_norm_stderr,none": 0.03383201223244441,
						"acc_stderr,none": 0.03383201223244441,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3319327731092437,
						"acc_norm,none": 0.3319327731092437,
						"acc_norm_stderr,none": 0.030588697013783663,
						"acc_stderr,none": 0.030588697013783663,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930118,
						"acc_stderr,none": 0.028009647070930118,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066653,
						"acc_stderr,none": 0.03785714465066653,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.031678729656234944,
						"acc_stderr,none": 0.031678729656234944,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.21476510067114093,
						"acc_norm,none": 0.21476510067114093,
						"acc_norm_stderr,none": 0.03375598567590243,
						"acc_stderr,none": 0.03375598567590243,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.26515151515151514,
						"acc_norm,none": 0.26515151515151514,
						"acc_norm_stderr,none": 0.03856650735812559,
						"acc_stderr,none": 0.03856650735812559,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920433,
						"acc_stderr,none": 0.03980329854920433,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.033047561588107864,
						"acc_stderr,none": 0.033047561588107864,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.041723430387053825,
						"acc_stderr,none": 0.041723430387053825,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.035725021418155686,
						"acc_stderr,none": 0.035725021418155686,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3492063492063492,
						"acc_norm,none": 0.3492063492063492,
						"acc_norm_stderr,none": 0.04263906892795133,
						"acc_stderr,none": 0.04263906892795133,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.034510399895624946,
						"acc_stderr,none": 0.034510399895624946,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.03384836428157858,
						"acc_stderr,none": 0.03384836428157858,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3644859813084112,
						"acc_norm,none": 0.3644859813084112,
						"acc_norm_stderr,none": 0.032977154614516745,
						"acc_stderr,none": 0.032977154614516745,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.22950819672131148,
						"acc_norm,none": 0.22950819672131148,
						"acc_norm_stderr,none": 0.03822877895195425,
						"acc_stderr,none": 0.03822877895195425,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2904761904761905,
						"acc_norm,none": 0.2904761904761905,
						"acc_norm_stderr,none": 0.03140260048069877,
						"acc_stderr,none": 0.03140260048069877,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2722222222222222,
						"acc_norm,none": 0.2722222222222222,
						"acc_norm_stderr,none": 0.03326861086666927,
						"acc_stderr,none": 0.03326861086666927,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.32275132275132273,
						"acc_norm,none": 0.32275132275132273,
						"acc_norm_stderr,none": 0.03409802097064963,
						"acc_stderr,none": 0.03409802097064963,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.04207160755584021,
						"acc_stderr,none": 0.04207160755584021,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.03565998174135303,
						"acc_stderr,none": 0.03565998174135303,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.03183348654463748,
						"acc_stderr,none": 0.03183348654463748,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.030971033440870908,
						"acc_stderr,none": 0.030971033440870908,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24468085106382978,
						"acc_norm,none": 0.24468085106382978,
						"acc_norm_stderr,none": 0.022199827758281308,
						"acc_stderr,none": 0.022199827758281308,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.030200390075231464,
						"acc_stderr,none": 0.030200390075231464,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.034607110840412306,
						"acc_stderr,none": 0.034607110840412306,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.03972552884785136,
						"acc_stderr,none": 0.03972552884785136,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3008849557522124,
						"acc_norm,none": 0.3008849557522124,
						"acc_norm_stderr,none": 0.030576185297580976,
						"acc_stderr,none": 0.030576185297580976,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.03588624800091707,
						"acc_stderr,none": 0.03588624800091707,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.22702702702702704,
						"acc_norm,none": 0.22702702702702704,
						"acc_norm_stderr,none": 0.030882469702495,
						"acc_stderr,none": 0.030882469702495,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.03456905430376243,
						"acc_stderr,none": 0.03456905430376243,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2795031055900621,
						"acc_norm,none": 0.2795031055900621,
						"acc_norm_stderr,none": 0.03547720390930392,
						"acc_stderr,none": 0.03547720390930392,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.34375,
						"acc_norm,none": 0.34375,
						"acc_norm_stderr,none": 0.03766668927755763,
						"acc_stderr,none": 0.03766668927755763,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02808452109965501,
						"mcc_stderr,none": 0.028506340824708255
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896308,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7763921911686604,
						"likelihood_diff_stderr,none": 0.4963909194332539,
						"pct_stereotype,none": 0.6049493142516398,
						"pct_stereotype_stderr,none": 0.07609615782995353
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.715278516301955,
						"likelihood_diff_stderr,none": 0.09010134172061347,
						"pct_stereotype,none": 0.6618962432915921,
						"pct_stereotype_stderr,none": 0.011555345868611677
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.320723397391183,
						"likelihood_diff_stderr,none": 0.39930254357514194,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.046449428524973954
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.169233842329546,
						"likelihood_diff_stderr,none": 2.0120495143716894,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.509718616192157,
						"likelihood_diff_stderr,none": 0.6842774762679692,
						"pct_stereotype,none": 0.676923076923077,
						"pct_stereotype_stderr,none": 0.05845647751373333
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.7296496272087096,
						"likelihood_diff_stderr,none": 0.16874399678274976,
						"pct_stereotype,none": 0.60625,
						"pct_stereotype_stderr,none": 0.027355258158219254
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.702464086038095,
						"likelihood_diff_stderr,none": 0.2301336124052135,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.03344887382997866
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.94637229707506,
						"likelihood_diff_stderr,none": 0.3611564440240885,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.385220918129748,
						"likelihood_diff_stderr,none": 0.15393402552001478,
						"pct_stereotype,none": 0.610236220472441,
						"pct_stereotype_stderr,none": 0.021659366500228653
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.8637546504939997,
						"likelihood_diff_stderr,none": 0.35294870144279356,
						"pct_stereotype,none": 0.8198198198198198,
						"pct_stereotype_stderr,none": 0.036645138937259764
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.8752095212218585,
						"likelihood_diff_stderr,none": 0.5105266285028041,
						"pct_stereotype,none": 0.7849462365591398,
						"pct_stereotype_stderr,none": 0.04283507835554755
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.142213439941406,
						"likelihood_diff_stderr,none": 0.24136976642809568,
						"pct_stereotype,none": 0.7315789473684211,
						"pct_stereotype_stderr,none": 0.032233538609655915
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.837505866035366,
						"likelihood_diff_stderr,none": 0.09346298943020427,
						"pct_stereotype,none": 0.5480023852116875,
						"pct_stereotype_stderr,none": 0.012156884449033536
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.0551190270317927,
						"likelihood_diff_stderr,none": 0.27577786133729165,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.7797088623046875,
						"likelihood_diff_stderr,none": 0.7022386854381283,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.3860939488266455,
						"likelihood_diff_stderr,none": 0.5870636680333402,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.0584705346204686
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.6300731076629735,
						"likelihood_diff_stderr,none": 0.1945409148843855,
						"pct_stereotype,none": 0.573208722741433,
						"pct_stereotype_stderr,none": 0.027649620415261086
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.8744351986368653,
						"likelihood_diff_stderr,none": 0.24699858893422777,
						"pct_stereotype,none": 0.41106719367588934,
						"pct_stereotype_stderr,none": 0.03099481241536975
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.8046260939704046,
						"likelihood_diff_stderr,none": 0.5204112498658137,
						"pct_stereotype,none": 0.6388888888888888,
						"pct_stereotype_stderr,none": 0.05700381461700859
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.8283100542814834,
						"likelihood_diff_stderr,none": 0.18140971418874302,
						"pct_stereotype,none": 0.4652173913043478,
						"pct_stereotype_stderr,none": 0.023281462893244318
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.549168296482252,
						"likelihood_diff_stderr,none": 0.3676040166950708,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.04588314677411234
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.797582060426146,
						"likelihood_diff_stderr,none": 0.3366229364614782,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.044411559168432764
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.258890872098962,
						"likelihood_diff_stderr,none": 0.2934312146841057,
						"pct_stereotype,none": 0.6785714285714286,
						"pct_stereotype_stderr,none": 0.03344434679897406
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05708661417322835,
						"exact_match_stderr,none": 0.0051481131263720215
					},
					"glue": {
						"acc,none": 0.4288649356836589,
						"acc_stderr,none": 0.0009025765607836904,
						"alias": "glue",
						"f1,none": 0.4093019840655304,
						"f1_stderr,none": 0.0014228569151280678,
						"mcc,none": -0.02808452109965501,
						"mcc_stderr,none": 0.028506340824708255
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.14025777103866566,
						"exact_match_stderr,get-answer": 0.00956510828142865
					},
					"hellaswag": {
						"acc,none": 0.5713005377414858,
						"acc_norm,none": 0.7597092212706632,
						"acc_norm_stderr,none": 0.004263868161042484,
						"acc_stderr,none": 0.004938787067611788,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2676003465203581,
						"acc_norm,none": 0.2676003465203581,
						"acc_norm_stderr,none": 0.034163260177736014,
						"acc_stderr,none": 0.034163260177736014,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.33,
						"acc_norm,none": 0.33,
						"acc_norm_stderr,none": 0.04725815626252605,
						"acc_stderr,none": 0.04725815626252605,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.01387777332977417,
						"acc_stderr,none": 0.01387777332977417,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.01394627184944048,
						"acc_stderr,none": 0.01394627184944048,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091515,
						"acc_stderr,none": 0.014205696104091515,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740673,
						"acc_stderr,none": 0.014142984975740673,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.017450143624648643,
						"acc_stderr,none": 0.017450143624648643,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.236,
						"acc_norm,none": 0.236,
						"acc_norm_stderr,none": 0.013434451402438694,
						"acc_stderr,none": 0.013434451402438694,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.351,
						"acc_norm,none": 0.351,
						"acc_norm_stderr,none": 0.015100563798316405,
						"acc_stderr,none": 0.015100563798316405,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177441,
						"acc_stderr,none": 0.013569640199177441,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.031652557907861936,
						"acc_stderr,none": 0.031652557907861936,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008224,
						"acc_stderr,none": 0.014414290540008224,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.25384615384615383,
						"acc_norm,none": 0.25384615384615383,
						"acc_norm_stderr,none": 0.038318158508744996,
						"acc_stderr,none": 0.038318158508744996,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909282,
						"acc_stderr,none": 0.04292346959909282,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.199,
						"acc_norm,none": 0.199,
						"acc_norm_stderr,none": 0.012631649083099175,
						"acc_stderr,none": 0.012631649083099175,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.241,
						"acc_norm,none": 0.241,
						"acc_norm_stderr,none": 0.013531522534515448,
						"acc_stderr,none": 0.013531522534515448,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660009,
						"acc_stderr,none": 0.013394902889660009,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.269,
						"acc_norm,none": 0.269,
						"acc_norm_stderr,none": 0.014029819522568196,
						"acc_stderr,none": 0.014029819522568196,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008222,
						"acc_stderr,none": 0.014414290540008222,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660014,
						"acc_stderr,none": 0.013394902889660014,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965131,
						"acc_stderr,none": 0.013895037677965131,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.013877773329774164,
						"acc_stderr,none": 0.013877773329774164,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.0144568322948011,
						"acc_stderr,none": 0.0144568322948011,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.01428212095520047,
						"acc_stderr,none": 0.01428212095520047,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.01366318713487765,
						"acc_stderr,none": 0.01366318713487765,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.268,
						"acc_norm,none": 0.268,
						"acc_norm_stderr,none": 0.014013292702729468,
						"acc_stderr,none": 0.014013292702729468,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.01428212095520048,
						"acc_stderr,none": 0.01428212095520048,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.28833333333333333,
						"acc_norm,none": 0.28833333333333333,
						"acc_norm_stderr,none": 0.018508547058789335,
						"acc_stderr,none": 0.018508547058789335,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.358,
						"acc_norm,none": 0.358,
						"acc_norm_stderr,none": 0.015167928865407559,
						"acc_stderr,none": 0.015167928865407559,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.013644675781314123,
						"acc_stderr,none": 0.013644675781314123,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281552,
						"acc_stderr,none": 0.013354937452281552,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.274,
						"acc_norm,none": 0.274,
						"acc_norm_stderr,none": 0.014111099288259581,
						"acc_stderr,none": 0.014111099288259581,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.044619604333847394,
						"acc_stderr,none": 0.044619604333847394,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.27666666666666667,
						"acc_norm,none": 0.27666666666666667,
						"acc_norm_stderr,none": 0.0258709313911235,
						"acc_stderr,none": 0.0258709313911235,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804481,
						"acc_stderr,none": 0.013273740700804481,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804481,
						"acc_stderr,none": 0.013273740700804481,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.031471451528433385,
						"acc_stderr,none": 0.031471451528433385,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702296,
						"acc_stderr,none": 0.013681600278702296,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008211,
						"acc_stderr,none": 0.014414290540008211,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.029832025555495228,
						"acc_stderr,none": 0.029832025555495228,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087981,
						"acc_stderr,none": 0.014683991951087981,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5071256303442228,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.0004986853707414854,
						"acc_stderr,none": 0.03303345296030365,
						"alias": "kobest",
						"f1,none": 0.41979664442214615,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5242165242165242,
						"acc_stderr,none": 0.013333101802438085,
						"alias": " - kobest_boolq",
						"f1,none": 0.3871399445867531,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.015689173023144074,
						"alias": " - kobest_copa",
						"f1,none": 0.5634342107371153,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.44,
						"acc_norm,none": 0.534,
						"acc_norm_stderr,none": 0.02233126442325838,
						"acc_stderr,none": 0.02222133153414305,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.437359653080645,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.44836272040302266,
						"acc_stderr,none": 0.02499159410984159,
						"alias": " - kobest_sentineg",
						"f1,none": 0.44270522854450706,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7085193091403066,
						"acc_stderr,none": 0.015247220853450627,
						"alias": "lambada",
						"perplexity,none": 3.7658176110470616,
						"perplexity_stderr,none": 0.1991207190229144
					},
					"lambada_cloze": {
						"acc,none": 0.0785949932078401,
						"acc_stderr,none": 0.0037502397922610326,
						"alias": "lambada_cloze",
						"perplexity,none": 120.91677248447283,
						"perplexity_stderr,none": 26.305404561724345
					},
					"lambada_multilingual": {
						"acc,none": 0.5075490005821851,
						"acc_stderr,none": 0.06904520559738755,
						"alias": "lambada_multilingual",
						"perplexity,none": 30.386892992110276,
						"perplexity_stderr,none": 9.196737138000033
					},
					"lambada_openai": {
						"acc,none": 0.7352998253444596,
						"acc_stderr,none": 0.006146408462993574,
						"alias": " - lambada_openai",
						"perplexity,none": 3.398195708351395,
						"perplexity_stderr,none": 0.06700599116380335
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.0784009314962158,
						"acc_stderr,none": 0.0037449299431192777,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 68.89170045971302,
						"perplexity_stderr,none": 1.8188074217904098
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3968562002716864,
						"acc_stderr,none": 0.006816149253065397,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 48.28191745048307,
						"perplexity_stderr,none": 2.80637964040322
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.736270133902581,
						"acc_stderr,none": 0.006139179363569852,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3997872229103083,
						"perplexity_stderr,none": 0.06685204814579437
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.43780322142441297,
						"acc_stderr,none": 0.006911872616149973,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 39.3529650745979,
						"perplexity_stderr,none": 2.05049895245314
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5076654376091597,
						"acc_stderr,none": 0.006965158982245363,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 25.578664979226993,
						"perplexity_stderr,none": 1.3452962676444224
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.45915000970308556,
						"acc_stderr,none": 0.006942690175543841,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 35.32113023333311,
						"perplexity_stderr,none": 1.994998639906734
					},
					"lambada_standard": {
						"acc,none": 0.6807684843780322,
						"acc_stderr,none": 0.006494783427738682,
						"alias": " - lambada_standard",
						"perplexity,none": 4.134488521537974,
						"perplexity_stderr,none": 0.08274211347540815
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.07878905491946439,
						"acc_stderr,none": 0.003753397522618996,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 172.94184450923265,
						"perplexity_stderr,none": 5.216141734069116
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.26653944020356235,
						"exact_match_stderr,get-answer": 0.011155294262477033
					},
					"logiqa": {
						"acc,none": 0.2534562211981567,
						"acc_norm,none": 0.30414746543778803,
						"acc_norm_stderr,none": 0.01804446579150677,
						"acc_stderr,none": 0.017061705439785732,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25636132315521626,
						"acc_norm,none": 0.30279898218829515,
						"acc_norm_stderr,none": 0.011592260158888729,
						"acc_stderr,none": 0.011015878683092601,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2800670016750419,
						"acc_norm,none": 0.2814070351758794,
						"acc_norm_stderr,none": 0.008232079320325332,
						"acc_stderr,none": 0.00822010947706588,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4246981571700911,
						"acc_stderr,none": 0.0050872030462581,
						"alias": "mc_taco",
						"f1,none": 0.5039269406392695,
						"f1_stderr,none": 0.005819432193678705
					},
					"medmcqa": {
						"acc,none": 0.3449677265120727,
						"acc_norm,none": 0.3449677265120727,
						"acc_norm_stderr,none": 0.007350697793603448,
						"acc_stderr,none": 0.007350697793603448,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.32128829536527886,
						"acc_norm,none": 0.32128829536527886,
						"acc_norm_stderr,none": 0.013093223036605022,
						"acc_stderr,none": 0.013093223036605022,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4077054550633813,
						"acc_stderr,none": 0.09187048511250485,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04232073695151589,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.4144736842105263,
						"acc_stderr,none": 0.04008973785779206,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4075471698113208,
						"acc_stderr,none": 0.030242233800854494,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4305555555555556,
						"acc_stderr,none": 0.04140685639111502,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695236,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3988439306358382,
						"acc_stderr,none": 0.037336266553835096,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.050161355804659205,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3872340425531915,
						"acc_stderr,none": 0.03184389265339526,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2807017543859649,
						"acc_stderr,none": 0.042270544512322,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.43448275862068964,
						"acc_stderr,none": 0.04130740879555497,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23809523809523808,
						"acc_stderr,none": 0.021935878081184752,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.039325376803928704,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.4096774193548387,
						"acc_stderr,none": 0.027976054915347364,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.3103448275862069,
						"acc_stderr,none": 0.03255086769970103,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5757575757575758,
						"acc_stderr,none": 0.038592681420702636,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.41414141414141414,
						"acc_stderr,none": 0.03509438348879629,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5077720207253886,
						"acc_stderr,none": 0.03608003225569653,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3871794871794872,
						"acc_stderr,none": 0.02469721693087894,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.026335739404055803,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3445378151260504,
						"acc_stderr,none": 0.030868682604121626,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.26490066225165565,
						"acc_stderr,none": 0.03603038545360385,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5431192660550459,
						"acc_stderr,none": 0.021357458785226206,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.030851992993257013,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5392156862745098,
						"acc_stderr,none": 0.03498501649369527,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5738396624472574,
						"acc_stderr,none": 0.03219035703131774,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.45739910313901344,
						"acc_stderr,none": 0.033435777055830646,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5648854961832062,
						"acc_stderr,none": 0.04348208051644858,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.38746014877789586,
						"acc_stderr,none": 0.1014882719217198,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.5867768595041323,
						"acc_stderr,none": 0.04495087843548408,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5092592592592593,
						"acc_stderr,none": 0.04832853553437056,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.50920245398773,
						"acc_stderr,none": 0.03927705600787443,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.4017857142857143,
						"acc_stderr,none": 0.046533331469736455,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.42718446601941745,
						"acc_stderr,none": 0.04897957737781168,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6025641025641025,
						"acc_stderr,none": 0.032059534537892925,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5312899106002554,
						"acc_stderr,none": 0.017844918090468547,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.41329479768786126,
						"acc_stderr,none": 0.026511261369409247,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24022346368715083,
						"acc_stderr,none": 0.014288343803925307,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4803921568627451,
						"acc_stderr,none": 0.028607893699576063,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4560669456066946,
						"acc_stderr,none": 0.06959799533941279,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4855305466237942,
						"acc_stderr,none": 0.02838619808417768,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4382716049382716,
						"acc_stderr,none": 0.027607914087400487,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.33687943262411346,
						"acc_stderr,none": 0.02819553487396673,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3239895697522816,
						"acc_stderr,none": 0.011952840809646577,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.029896163033125478,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4199346405228758,
						"acc_stderr,none": 0.019966811178256487,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4090909090909091,
						"acc_stderr,none": 0.04709306978661896,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4857142857142857,
						"acc_stderr,none": 0.03199615232806287,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4647383815404615,
						"acc_stderr,none": 0.07958262286944816,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6417910447761194,
						"acc_stderr,none": 0.03390393042268814,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.33460196638122425,
						"acc_stderr,none": 0.07292341967401174,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.40963855421686746,
						"acc_stderr,none": 0.03828401115079023,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5380116959064327,
						"acc_stderr,none": 0.03823727092882307,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.42689760570555274,
						"acc_stderr,none": 0.004992923869426006,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4394833197721725,
						"acc_stderr,none": 0.005005720777867014,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6936274509803921,
						"acc_stderr,none": 0.022850244770264948,
						"alias": "mrpc",
						"f1,none": 0.8164464023494861,
						"f1_stderr,none": 0.016176785503530685
					},
					"multimedqa": {
						"acc,none": 0.3777146912704045,
						"acc_norm,none": 0.3366191038294915,
						"acc_norm_stderr,none": 0.00011728954031513654,
						"acc_stderr,none": 0.08904389567338657,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5699257425742574,
						"acc_stderr,none": 0.007111223871933902,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7087095560571859,
						"mrr_stderr,none": 0.010277044267218713,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.42776523702031605,
						"r@2_stderr,none": 0.01663099478654635
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6537810383747173,
						"mrr_stderr,none": 0.010448966335382708,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.45936794582392776,
						"r@2_stderr,none": 0.01675172766782549
					},
					"openbookqa": {
						"acc,none": 0.316,
						"acc_norm,none": 0.442,
						"acc_norm_stderr,none": 0.02223197069632112,
						"acc_stderr,none": 0.020812359515855854,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3605,
						"acc_stderr,none": 0.010739066010104796,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.292,
						"acc_stderr,none": 0.010169548163754637,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.010761501054800693,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.454,
						"acc_stderr,none": 0.011135708419359805,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.507,
						"acc_stderr,none": 0.011182040020027767,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4745,
						"acc_stderr,none": 0.011168582883330074,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.433,
						"acc_stderr,none": 0.011082279027990142,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.41214285714285714,
						"acc_stderr,none": 0.05298357400023882,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.780739934711643,
						"acc_norm,none": 0.7899891186071817,
						"acc_norm_stderr,none": 0.009503353305818571,
						"acc_stderr,none": 0.009653357463605338,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23719043552519214,
						"acc_norm,none": 0.2775939368061486,
						"acc_norm_stderr,none": 0.003271665018993746,
						"acc_stderr,none": 0.0031076335572522056,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177562,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7414992937390027,
						"acc_norm,none": 0.6543042593572382,
						"acc_norm_stderr,none": 0.009797090522454048,
						"acc_stderr,none": 0.14522400447304726,
						"alias": "pythia",
						"bits_per_byte,none": 0.5864058660635897,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5015014478739381,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.398195708351395,
						"perplexity_stderr,none": 0.06700599116380335,
						"word_perplexity,none": 8.78947151686651,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.44680851063829785,
						"acc_norm,none": 0.5070921985815603,
						"acc_norm_stderr,none": 0.07515845281612854,
						"acc_stderr,none": 0.044546290246168116,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5083333333333333,
						"acc_norm,none": 0.6416666666666667,
						"acc_norm_stderr,none": 0.0439566780192005,
						"acc_stderr,none": 0.045828558447483604,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.44375,
						"acc_norm,none": 0.55625,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.03940085379625942,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4225352112676056,
						"acc_norm,none": 0.4225352112676056,
						"acc_norm_stderr,none": 0.029363038140788528,
						"acc_stderr,none": 0.029363038140788538,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49899322716456157,
						"acc_stderr,none": 0.006765396837036608,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4117487014593124,
						"acc_stderr,none": 0.0024476598653283732,
						"alias": "qqp",
						"f1,none": 0.40579637726420986,
						"f1_stderr,none": 0.00310971923616114
					},
					"race": {
						"acc,none": 0.39425837320574164,
						"acc_stderr,none": 0.015124600889668079,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.02903252442802371,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.939,
						"acc_norm,none": 0.911,
						"acc_norm_stderr,none": 0.009008893392651492,
						"acc_stderr,none": 0.007572076091557422,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.631768953068592,
						"acc_stderr,none": 0.02903252442802371,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.4954128440366973,
						"acc_stderr,none": 0.016941140693324253,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5657802659202239,
						"acc_norm,none": 0.7671698490452864,
						"acc_norm_stderr,none": 0.0029881066568489736,
						"acc_stderr,none": 0.0035043655183714744,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7656650361052877,
						"acc_stderr,none": 0.04167851001411332,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.8048878205128205,
						"acc_stderr,none": 0.003966243125932171,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8370325326847066,
						"acc_stderr,none": 0.0037183568919741624,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.658235294117647,
						"acc_stderr,none": 0.004696511522605023,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3204850251853695,
						"acc_stderr,none": 0.0014178002093164159,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.35006119951040393,
						"bleu_acc_stderr,none": 0.01669794942015103,
						"bleu_diff,none": -5.3500854414780425,
						"bleu_diff_stderr,none": 0.9804146767920648,
						"bleu_max,none": 31.00293500319662,
						"bleu_max_stderr,none": 0.8279911546400845,
						"rouge1_acc,none": 0.3353733170134639,
						"rouge1_acc_stderr,none": 0.01652753403966899,
						"rouge1_diff,none": -6.323106350558213,
						"rouge1_diff_stderr,none": 1.0926663608439762,
						"rouge1_max,none": 56.61671906198531,
						"rouge1_max_stderr,none": 0.8594703426587661,
						"rouge2_acc,none": 0.31211750305997554,
						"rouge2_acc_stderr,none": 0.016220756769520957,
						"rouge2_diff,none": -7.389501125042459,
						"rouge2_diff_stderr,none": 1.2810213690404875,
						"rouge2_max,none": 42.423677919851784,
						"rouge2_max_stderr,none": 1.0236591235927208,
						"rougeL_acc,none": 0.33659730722154224,
						"rougeL_acc_stderr,none": 0.0165424128094949,
						"rougeL_diff,none": -6.452481151621109,
						"rougeL_diff_stderr,none": 1.1016746550924312,
						"rougeL_max,none": 53.778756886336346,
						"rougeL_max_stderr,none": 0.8848956056120001
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.35006119951040393,
						"bleu_acc_stderr,none": 0.01669794942015103,
						"bleu_diff,none": -5.3500854414780425,
						"bleu_diff_stderr,none": 0.9804146767920648,
						"bleu_max,none": 31.00293500319662,
						"bleu_max_stderr,none": 0.8279911546400845,
						"rouge1_acc,none": 0.3353733170134639,
						"rouge1_acc_stderr,none": 0.01652753403966899,
						"rouge1_diff,none": -6.323106350558213,
						"rouge1_diff_stderr,none": 1.0926663608439762,
						"rouge1_max,none": 56.61671906198531,
						"rouge1_max_stderr,none": 0.8594703426587661,
						"rouge2_acc,none": 0.31211750305997554,
						"rouge2_acc_stderr,none": 0.016220756769520957,
						"rouge2_diff,none": -7.389501125042459,
						"rouge2_diff_stderr,none": 1.2810213690404875,
						"rouge2_max,none": 42.423677919851784,
						"rouge2_max_stderr,none": 1.0236591235927208,
						"rougeL_acc,none": 0.33659730722154224,
						"rougeL_acc_stderr,none": 0.0165424128094949,
						"rougeL_diff,none": -6.452481151621109,
						"rougeL_diff_stderr,none": 1.1016746550924312,
						"rougeL_max,none": 53.778756886336346,
						"rougeL_max_stderr,none": 0.8848956056120001
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25091799265605874,
						"acc_stderr,none": 0.0151769850277077,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.39005255201736655,
						"acc_stderr,none": 0.013573369468181155,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05708661417322835,
						"exact_match_stderr,none": 0.0051481131263720215
					},
					"wic": {
						"acc,none": 0.49843260188087773,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.5864058660635897,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5015014478739381,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 8.78947151686651,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.691397000789266,
						"acc_stderr,none": 0.012982160200926584,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.05961305784972239,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7948717948717948,
						"acc_stderr,none": 0.024483684888005928,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5667272727272729,
						"acc_stderr,none": 0.05251474454012673,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.021660710347204484,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.02114479142504885,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.02227969410784342,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.021660710347204484,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.02138004238594605,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.39903614457831327,
						"acc_stderr,none": 0.05197241860273406,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.348995983935743,
						"acc_stderr,none": 0.009554095988300688,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.41124497991967873,
						"acc_stderr,none": 0.009862912223544639,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.45582329317269077,
						"acc_stderr,none": 0.009982878443738422,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3795180722891566,
						"acc_stderr,none": 0.00972676337283714,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5630522088353414,
						"acc_stderr,none": 0.009942066394610854,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.00986436082175034,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807708,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3819277108433735,
						"acc_stderr,none": 0.009738627914517515,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.41526104417670684,
						"acc_stderr,none": 0.009877093420328584,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206103,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335287,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3686746987951807,
						"acc_stderr,none": 0.009670208010505244,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3710843373493976,
						"acc_stderr,none": 0.009683226021349281,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.009551542053301821,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5751158173395101,
						"acc_stderr,none": 0.07472708356841419,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.49768365320979485,
						"acc_stderr,none": 0.012866987239478045,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7696889477167439,
						"acc_stderr,none": 0.010834940764817839,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6730641958967571,
						"acc_stderr,none": 0.012071771683911353,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5023163467902052,
						"acc_stderr,none": 0.012866987239478045,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5360688285903376,
						"acc_stderr,none": 0.012833602406620017,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5936465916611515,
						"acc_stderr,none": 0.012639429420389871,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.47915287888815355,
						"acc_stderr,none": 0.012855936282881267,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.628060886829914,
						"acc_stderr,none": 0.01243793623520203,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5069490403706155,
						"acc_stderr,none": 0.012865882570960722,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5420251489080079,
						"acc_stderr,none": 0.012821595164245275,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5976174718729318,
						"acc_stderr,none": 0.012619516819528718,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7945605754102046,
						"acc_stderr,none": 0.04881325187450866,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8804301075268817,
						"acc_stderr,none": 0.006730391078162498,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.05221260262032129,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7038581856100105,
						"acc_stderr,none": 0.014750600290668364,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7338403041825095,
						"acc_stderr,none": 0.027303685972946627,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.026269018848607696,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6944444444444444,
						"acc_stderr,none": 0.020539026423151478,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "meta-llama/Llama-2-7b-hf"
	},
	"microsoft/phi-1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.31228861330326946,
						"acc_norm,none": 0.3086245772266065,
						"acc_norm_stderr,none": 0.03755617597447246,
						"acc_stderr,none": 0.05401455594666426,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3009375,
						"acc_stderr,none": 0.015193491890194722,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.02185,
						"acc_stderr,none": 0.024499892050353748,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.7167611940298507,
						"acc_stderr,none": 0.14959619372952507,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24294205052005943,
						"acc_norm,none": 0.24294205052005943,
						"acc_norm_stderr,none": 0.11380451718630548,
						"acc_stderr,none": 0.11380451718630548,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2504748748057331,
						"acc_norm,none": 0.2504748748057331,
						"acc_norm_stderr,none": 0.03603890385050036,
						"acc_stderr,none": 0.03603890385050036,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 7.2762197666247825,
						"likelihood_diff_stderr,none": 1.5405441062216259,
						"pct_stereotype,none": 0.4669051878354204,
						"pct_stereotype_stderr,none": 0.07361618986038487
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.002952755905511811,
						"exact_match_stderr,none": 0.0012039728135357858
					},
					"glue": {
						"acc,none": 0.44440788068071435,
						"acc_stderr,none": 0.02486305485824581,
						"alias": "glue",
						"f1,none": 0.49602615371488135,
						"f1_stderr,none": 0.00022628131485044538,
						"mcc,none": -0.04427163276171355,
						"mcc_stderr,none": 0.0008142528849337667
					},
					"kmmlu": {
						"acc,none": 0.10141495812879006,
						"acc_norm,none": 0.10141495812879006,
						"acc_norm_stderr,none": 0.0636223216843437,
						"acc_stderr,none": 0.0636223216843437,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.47226485419864067,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.0004888096192384763,
						"acc_stderr,none": 0.042406016547196064,
						"alias": "kobest",
						"f1,none": 0.3611138434607754,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.09596351639821463,
						"acc_stderr,none": 0.006615671208021231,
						"alias": "lambada",
						"perplexity,none": 2685.506801586333,
						"perplexity_stderr,none": 328.29262962059437
					},
					"lambada_cloze": {
						"acc,none": 0.015330875218319426,
						"acc_stderr,none": 0.006159478702424399,
						"alias": "lambada_cloze",
						"perplexity,none": 45681.71788121615,
						"perplexity_stderr,none": 8761.14358133287
					},
					"lambada_multilingual": {
						"acc,none": 0.05344459538133126,
						"acc_stderr,none": 0.015997797491782328,
						"alias": "lambada_multilingual",
						"perplexity,none": 604484.5857821594,
						"perplexity_stderr,none": 264539.52930330223
					},
					"mmlu": {
						"acc,none": 0.2519584104828372,
						"acc_stderr,none": 0.03696200615821548,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.251009564293305,
						"acc_stderr,none": 0.02560891633259949,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.26874798841326036,
						"acc_stderr,none": 0.040250632694020225,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24666883327916803,
						"acc_stderr,none": 0.03167665564890943,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24199175388518868,
						"acc_stderr,none": 0.04810567086298941,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.26430092264017035,
						"acc_norm,none": 0.2403285730488725,
						"acc_norm_stderr,none": 9.966281611173883e-05,
						"acc_stderr,none": 0.0633511490625171,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.49842857142857144,
						"acc_stderr,none": 0.028480163371254853,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.5895999175269849,
						"acc_norm,none": 0.31729613914072274,
						"acc_norm_stderr,none": 0.003787079937645793,
						"acc_stderr,none": 0.18426968914343322,
						"alias": "pythia",
						"bits_per_byte,none": 1.5594074450869237,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.9473276366452805,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2122.0844159549133,
						"perplexity_stderr,none": 134.23442502928177,
						"word_perplexity,none": 323.77647124335505,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.25354609929078015,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.03532668733253244,
						"acc_stderr,none": 0.03736129272535806,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.4558583740973678,
						"acc_stderr,none": 0.03512410550577622,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3541550349800609,
						"acc_stderr,none": 0.06046304832478659,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.0002590774867436336,
						"bleu_diff,none": -3.058967803850556,
						"bleu_diff_stderr,none": 0.21510156755834858,
						"bleu_max,none": 15.376870587650536,
						"bleu_max_stderr,none": 0.3363566486620398,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.00025486209819011995,
						"rouge1_diff,none": -5.235627921489654,
						"rouge1_diff_stderr,none": 0.3530098275098548,
						"rouge1_max,none": 38.26302707107735,
						"rouge1_max_stderr,none": 0.5606026731446057,
						"rouge2_acc,none": 0.2141982864137087,
						"rouge2_acc_stderr,none": 0.0002062712996460027,
						"rouge2_diff,none": -5.338754231297163,
						"rouge2_diff_stderr,none": 0.40032250880397335,
						"rouge2_max,none": 21.28898467311369,
						"rouge2_max_stderr,none": 0.6258049586978924,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.0002485279995361043,
						"rougeL_diff,none": -5.223026841958686,
						"rougeL_diff_stderr,none": 0.3253340484774495,
						"rougeL_max,none": 35.181796595837234,
						"rougeL_max_stderr,none": 0.5587965905722869
					},
					"xcopa": {
						"acc,none": 0.5176363636363636,
						"acc_stderr,none": 0.02961243699532362,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.34125836680053545,
						"acc_stderr,none": 0.019307682076867023,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.48571084772276035,
						"acc_stderr,none": 0.01917668870742635,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.5183187233086087,
						"acc_stderr,none": 0.02177253452137737,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.31228861330326946,
						"acc_norm,none": 0.3086245772266065,
						"acc_norm_stderr,none": 0.03755617597447246,
						"acc_stderr,none": 0.05401455594666426,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3009375,
						"acc_stderr,none": 0.015193491890194722,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.299,
						"acc_stderr,none": 0.014484778521220466,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.014671272822977886,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.2925,
						"acc_stderr,none": 0.01313761606215125,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.19965870307167236,
						"acc_norm,none": 0.23208191126279865,
						"acc_norm_stderr,none": 0.012336718284948856,
						"acc_stderr,none": 0.011681625756888676,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.36784511784511786,
						"acc_norm,none": 0.3463804713804714,
						"acc_norm_stderr,none": 0.009763542075695734,
						"acc_stderr,none": 0.009894923464455186,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.02185,
						"acc_stderr,none": 0.024499892050353748,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0965,
						"acc_stderr,none": 0.006604217049841631,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0355,
						"acc_stderr,none": 0.004138651860160545,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.041,
						"acc_stderr,none": 0.004435012363830994,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.038,
						"acc_stderr,none": 0.00427634698917031,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521454,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.015618221258134491,
						"acc_stderr,none": 0.0025831898836907665,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.7167611940298507,
						"acc_stderr,none": 0.14959619372952507,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.724,
						"acc_stderr,none": 0.014142984975740668,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.015680876566375058,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.0122851913263867,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438699,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341678,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.015090650341444235,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145183,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.01377220656516854,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528031,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.01202162715773197,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.01215515313551196,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271308,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220056,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661782,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662753,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.01293148186493803,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.01447084674113472,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.759,
						"acc_stderr,none": 0.013531522534515434,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598114,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621223,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.01449862787336143,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389626,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.453,
						"acc_stderr,none": 0.015749255189977593,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.705,
						"acc_stderr,none": 0.014428554438445504,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852618,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575018,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.013912208651021349,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.013454070462577957,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.011912216456264604,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.778,
						"acc_stderr,none": 0.013148721948877364,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.382,
						"acc_stderr,none": 0.015372453034968526,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.013996674851796285,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301318,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.015819015179246724,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.015778243024904586,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.0077997330618320045,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229857,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366657,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786553,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695459,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247123,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696235,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.602,
						"acc_stderr,none": 0.015486634102858917,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.493,
						"acc_stderr,none": 0.01581774956184357,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.444,
						"acc_stderr,none": 0.015719768163402086,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177904,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145163,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.01031821038094609,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.01580397942816195,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.485,
						"acc_stderr,none": 0.015812179641814902,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397334,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.014470846741134713,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.609,
						"acc_stderr,none": 0.015438826294681782,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434939,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.01581119837311488,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763914,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357798,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315143,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491125,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.415,
						"acc_stderr,none": 0.015589035185604632,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.274,
						"acc_stderr,none": 0.014111099288259583,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.45168195718654436,
						"acc_stderr,none": 0.00870412620615935,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.0673769750864465,
						"alias": "cb",
						"f1,none": 0.30844645550527905,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24294205052005943,
						"acc_norm,none": 0.24294205052005943,
						"acc_norm_stderr,none": 0.11380451718630548,
						"acc_stderr,none": 0.11380451718630548,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3939393939393939,
						"acc_norm,none": 0.3939393939393939,
						"acc_norm_stderr,none": 0.08637692614387409,
						"acc_stderr,none": 0.08637692614387409,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.12121212121212122,
						"acc_norm,none": 0.12121212121212122,
						"acc_norm_stderr,none": 0.0576952508019993,
						"acc_stderr,none": 0.0576952508019993,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.06595297051445338,
						"acc_stderr,none": 0.06595297051445338,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.38181818181818183,
						"acc_norm,none": 0.38181818181818183,
						"acc_norm_stderr,none": 0.06611340675536795,
						"acc_stderr,none": 0.06611340675536795,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.4375,
						"acc_norm,none": 0.4375,
						"acc_norm_stderr,none": 0.128086884574495,
						"acc_stderr,none": 0.128086884574495,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.06527647182968216,
						"acc_stderr,none": 0.06527647182968216,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.0914486154730632,
						"acc_stderr,none": 0.0914486154730632,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0652050663696626,
						"acc_stderr,none": 0.0652050663696626,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.05555555555555555,
						"acc_norm,none": 0.05555555555555555,
						"acc_norm_stderr,none": 0.055555555555555566,
						"acc_stderr,none": 0.055555555555555566,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.08742975048915691,
						"acc_stderr,none": 0.08742975048915691,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.05650315562208096,
						"acc_stderr,none": 0.05650315562208096,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764435,
						"acc_stderr,none": 0.09361833424764435,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2504748748057331,
						"acc_norm,none": 0.2504748748057331,
						"acc_norm_stderr,none": 0.03603890385050036,
						"acc_stderr,none": 0.03603890385050036,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019636,
						"acc_stderr,none": 0.033341501981019636,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070897,
						"acc_stderr,none": 0.03541088558070897,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.030166316298847997,
						"acc_stderr,none": 0.030166316298847997,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.03650781710789269,
						"acc_stderr,none": 0.03650781710789269,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566669,
						"acc_stderr,none": 0.04317273776566669,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2693498452012384,
						"acc_norm,none": 0.2693498452012384,
						"acc_norm_stderr,none": 0.02472208923080204,
						"acc_stderr,none": 0.02472208923080204,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.030190282453501954,
						"acc_stderr,none": 0.030190282453501954,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24581005586592178,
						"acc_norm,none": 0.24581005586592178,
						"acc_norm_stderr,none": 0.032272320235413,
						"acc_stderr,none": 0.032272320235413,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.26582278481012656,
						"acc_norm,none": 0.26582278481012656,
						"acc_norm_stderr,none": 0.028756799629658325,
						"acc_stderr,none": 0.028756799629658325,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800374,
						"acc_stderr,none": 0.04142972007800374,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845102,
						"acc_stderr,none": 0.04117581097845102,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2490842490842491,
						"acc_norm,none": 0.2490842490842491,
						"acc_norm_stderr,none": 0.02622311550050611,
						"acc_stderr,none": 0.02622311550050611,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.22549019607843138,
						"acc_norm,none": 0.22549019607843138,
						"acc_norm_stderr,none": 0.029331162294251735,
						"acc_stderr,none": 0.029331162294251735,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.26900584795321636,
						"acc_norm,none": 0.26900584795321636,
						"acc_norm_stderr,none": 0.0340105262010409,
						"acc_stderr,none": 0.0340105262010409,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.03729198658164233,
						"acc_stderr,none": 0.03729198658164233,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.03361670240809545,
						"acc_stderr,none": 0.03361670240809545,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.033519538795212696,
						"acc_stderr,none": 0.033519538795212696,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.03361101403890495,
						"acc_stderr,none": 0.03361101403890495,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.02775179241879092,
						"acc_stderr,none": 0.02775179241879092,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.030746300742124505,
						"acc_stderr,none": 0.030746300742124505,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.028657491285071963,
						"acc_stderr,none": 0.028657491285071963,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23043478260869565,
						"acc_norm,none": 0.23043478260869565,
						"acc_norm_stderr,none": 0.027827807522276156,
						"acc_stderr,none": 0.027827807522276156,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.036421927837417066,
						"acc_stderr,none": 0.036421927837417066,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26704545454545453,
						"acc_norm,none": 0.26704545454545453,
						"acc_norm_stderr,none": 0.03344352850079125,
						"acc_stderr,none": 0.03344352850079125,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.23728813559322035,
						"acc_norm,none": 0.23728813559322035,
						"acc_norm_stderr,none": 0.039330125499343845,
						"acc_stderr,none": 0.039330125499343845,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.0333464540866534,
						"acc_stderr,none": 0.0333464540866534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940588,
						"acc_stderr,none": 0.04265792110940588,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.1958041958041958,
						"acc_norm,none": 0.1958041958041958,
						"acc_norm_stderr,none": 0.03330026780648431,
						"acc_stderr,none": 0.03330026780648431,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.24603174603174602,
						"acc_norm,none": 0.24603174603174602,
						"acc_norm_stderr,none": 0.038522733649243156,
						"acc_stderr,none": 0.038522733649243156,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.22162162162162163,
						"acc_norm,none": 0.22162162162162163,
						"acc_norm_stderr,none": 0.030619107991457346,
						"acc_stderr,none": 0.030619107991457346,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761062,
						"acc_stderr,none": 0.03336605189761062,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24574209245742093,
						"acc_norm,none": 0.24574209245742093,
						"acc_norm_stderr,none": 0.021262179663182224,
						"acc_stderr,none": 0.021262179663182224,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.029387023754333115,
						"acc_stderr,none": 0.029387023754333115,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.039147319035957334,
						"acc_stderr,none": 0.039147319035957334,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.26857142857142857,
						"acc_norm,none": 0.26857142857142857,
						"acc_norm_stderr,none": 0.033600151915923894,
						"acc_stderr,none": 0.033600151915923894,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26066350710900477,
						"acc_norm,none": 0.26066350710900477,
						"acc_norm_stderr,none": 0.030293645661742804,
						"acc_stderr,none": 0.030293645661742804,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056263,
						"acc_stderr,none": 0.022593550801056263,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171142,
						"acc_stderr,none": 0.028324514684171142,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.23563218390804597,
						"acc_norm,none": 0.23563218390804597,
						"acc_norm_stderr,none": 0.032266023739324454,
						"acc_stderr,none": 0.032266023739324454,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.22962962962962963,
						"acc_norm,none": 0.22962962962962963,
						"acc_norm_stderr,none": 0.036333844140734636,
						"acc_stderr,none": 0.036333844140734636,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139405,
						"acc_stderr,none": 0.03374402644139405,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.033870869961530825,
						"acc_stderr,none": 0.033870869961530825,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.04427163276171355,
						"mcc_stderr,none": 0.028535116697391772
					},
					"copa": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 7.2762197666247825,
						"likelihood_diff_stderr,none": 1.5405441062216259,
						"pct_stereotype,none": 0.4669051878354204,
						"pct_stereotype_stderr,none": 0.07361618986038487
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 6.142133882234831,
						"likelihood_diff_stderr,none": 0.151868479679465,
						"pct_stereotype,none": 0.5247465712581991,
						"pct_stereotype_stderr,none": 0.01219833137408679
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 5.829931699312651,
						"likelihood_diff_stderr,none": 0.47189559339930603,
						"pct_stereotype,none": 0.4945054945054945,
						"pct_stereotype_stderr,none": 0.052701445311128796
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 10.251455827192826,
						"likelihood_diff_stderr,none": 3.812120073880832,
						"pct_stereotype,none": 0.5454545454545454,
						"pct_stereotype_stderr,none": 0.1574591643244434
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 7.947497089092548,
						"likelihood_diff_stderr,none": 0.7979660309398204,
						"pct_stereotype,none": 0.5076923076923077,
						"pct_stereotype_stderr,none": 0.062492603112584276
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 5.553816711902618,
						"likelihood_diff_stderr,none": 0.3489961883382409,
						"pct_stereotype,none": 0.5125,
						"pct_stereotype_stderr,none": 0.02798587585995665
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 6.411848386128743,
						"likelihood_diff_stderr,none": 0.44662009526820967,
						"pct_stereotype,none": 0.39814814814814814,
						"pct_stereotype_stderr,none": 0.03338473403207401
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 5.364331457349989,
						"likelihood_diff_stderr,none": 0.5827947594958346,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.05913268547421809
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 5.613429910554661,
						"likelihood_diff_stderr,none": 0.24767394176379157,
						"pct_stereotype,none": 0.5452755905511811,
						"pct_stereotype_stderr,none": 0.02211455387069533
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 7.342658996582031,
						"likelihood_diff_stderr,none": 0.6844595880406099,
						"pct_stereotype,none": 0.6126126126126126,
						"pct_stereotype_stderr,none": 0.0464482507235508
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 8.129508767076718,
						"likelihood_diff_stderr,none": 0.8241853112772043,
						"pct_stereotype,none": 0.6129032258064516,
						"pct_stereotype_stderr,none": 0.050782235596722784
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 6.175443568982576,
						"likelihood_diff_stderr,none": 0.43572929826400203,
						"pct_stereotype,none": 0.5368421052631579,
						"pct_stereotype_stderr,none": 0.036270781985214155
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 8.407523183077663,
						"likelihood_diff_stderr,none": 0.18902490647884682,
						"pct_stereotype,none": 0.4108527131782946,
						"pct_stereotype_stderr,none": 0.012017607439726758
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 6.6777710808648,
						"likelihood_diff_stderr,none": 0.5908686413560221,
						"pct_stereotype,none": 0.5111111111111111,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 8.568145751953125,
						"likelihood_diff_stderr,none": 2.0345774393584732,
						"pct_stereotype,none": 0.3076923076923077,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 11.581617991129557,
						"likelihood_diff_stderr,none": 1.0119068899562509,
						"pct_stereotype,none": 0.30303030303030304,
						"pct_stereotype_stderr,none": 0.057002420795512765
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 6.8130323820025005,
						"likelihood_diff_stderr,none": 0.30665219459054915,
						"pct_stereotype,none": 0.5358255451713395,
						"pct_stereotype_stderr,none": 0.027879009258377073
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 13.377566597678445,
						"likelihood_diff_stderr,none": 0.5735097770512304,
						"pct_stereotype,none": 0.2766798418972332,
						"pct_stereotype_stderr,none": 0.028180829560220628
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 7.915651162465413,
						"likelihood_diff_stderr,none": 0.9638675160215517,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.058206509425695316
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 6.834952951514203,
						"likelihood_diff_stderr,none": 0.3360167707519189,
						"pct_stereotype,none": 0.34130434782608693,
						"pct_stereotype_stderr,none": 0.022131302075323833
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 8.886236572265625,
						"likelihood_diff_stderr,none": 0.70513622835588,
						"pct_stereotype,none": 0.30434782608695654,
						"pct_stereotype_stderr,none": 0.0430951850246393
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 7.7028516832288805,
						"likelihood_diff_stderr,none": 0.8167175990862374,
						"pct_stereotype,none": 0.6263736263736264,
						"pct_stereotype_stderr,none": 0.0509934316638677
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 8.26336638781489,
						"likelihood_diff_stderr,none": 0.5636042512572683,
						"pct_stereotype,none": 0.41836734693877553,
						"pct_stereotype_stderr,none": 0.035325309438765586
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.002952755905511811,
						"exact_match_stderr,none": 0.0012039728135357858
					},
					"glue": {
						"acc,none": 0.44440788068071435,
						"acc_stderr,none": 0.02486305485824581,
						"alias": "glue",
						"f1,none": 0.49602615371488135,
						"f1_stderr,none": 0.00022628131485044538,
						"mcc,none": -0.04427163276171355,
						"mcc_stderr,none": 0.0008142528849337667
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.009855951478392721,
						"exact_match_stderr,get-answer": 0.002721076577041659
					},
					"hellaswag": {
						"acc,none": 0.28579964150567616,
						"acc_norm,none": 0.303823939454292,
						"acc_norm_stderr,none": 0.004589676274079085,
						"acc_stderr,none": 0.004508710891053851,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.10141495812879006,
						"acc_norm,none": 0.10141495812879006,
						"acc_norm_stderr,none": 0.0636223216843437,
						"acc_stderr,none": 0.0636223216843437,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.03942772444036623,
						"acc_stderr,none": 0.03942772444036623,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.095,
						"acc_norm,none": 0.095,
						"acc_norm_stderr,none": 0.009276910103103294,
						"acc_stderr,none": 0.009276910103103294,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.076,
						"acc_norm,none": 0.076,
						"acc_norm_stderr,none": 0.008384169266796391,
						"acc_stderr,none": 0.008384169266796391,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370098,
						"acc_stderr,none": 0.012486268734370098,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707354,
						"acc_stderr,none": 0.012559527926707354,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.17833333333333334,
						"acc_norm,none": 0.17833333333333334,
						"acc_norm_stderr,none": 0.015640501955765617,
						"acc_stderr,none": 0.015640501955765617,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.012,
						"acc_norm,none": 0.012,
						"acc_norm_stderr,none": 0.003444977194099821,
						"acc_stderr,none": 0.003444977194099821,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.006,
						"acc_norm,none": 0.006,
						"acc_norm_stderr,none": 0.002443352199329852,
						"acc_stderr,none": 0.002443352199329852,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306499,
						"acc_stderr,none": 0.004536472151306499,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.035,
						"acc_norm,none": 0.035,
						"acc_norm_stderr,none": 0.005814534272734973,
						"acc_stderr,none": 0.005814534272734973,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.00512808904927529,
						"acc_stderr,none": 0.00512808904927529,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.022,
						"acc_norm,none": 0.022,
						"acc_norm_stderr,none": 0.0046408552592747026,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.012285191326386702,
						"acc_stderr,none": 0.012285191326386702,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.049,
						"acc_norm,none": 0.049,
						"acc_norm_stderr,none": 0.006829761756140926,
						"acc_stderr,none": 0.006829761756140926,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528024,
						"acc_stderr,none": 0.010709373963528024,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.01001655286669686,
						"acc_stderr,none": 0.01001655286669686,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.082,
						"acc_norm,none": 0.082,
						"acc_norm_stderr,none": 0.008680515615523719,
						"acc_stderr,none": 0.008680515615523719,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333443,
						"acc_stderr,none": 0.008534156773333443,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.022,
						"acc_norm,none": 0.022,
						"acc_norm_stderr,none": 0.004640855259274703,
						"acc_stderr,none": 0.004640855259274703,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.034,
						"acc_norm,none": 0.034,
						"acc_norm_stderr,none": 0.005733836139695465,
						"acc_stderr,none": 0.005733836139695465,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.057,
						"acc_norm,none": 0.057,
						"acc_norm_stderr,none": 0.007335175853706822,
						"acc_stderr,none": 0.007335175853706822,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936426,
						"acc_stderr,none": 0.013334797216936426,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.075,
						"acc_norm,none": 0.075,
						"acc_norm_stderr,none": 0.008333333333333361,
						"acc_stderr,none": 0.008333333333333361,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.012285191326386712,
						"acc_stderr,none": 0.012285191326386712,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.14166666666666666,
						"acc_norm,none": 0.14166666666666666,
						"acc_norm_stderr,none": 0.014247819867919655,
						"acc_stderr,none": 0.014247819867919655,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.066,
						"acc_norm,none": 0.066,
						"acc_norm_stderr,none": 0.007855297938697579,
						"acc_stderr,none": 0.007855297938697579,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.085,
						"acc_norm,none": 0.085,
						"acc_norm_stderr,none": 0.008823426366942317,
						"acc_stderr,none": 0.008823426366942317,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.024212609617951908,
						"acc_stderr,none": 0.024212609617951908,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967222,
						"acc_stderr,none": 0.013473586661967222,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.041,
						"acc_norm,none": 0.041,
						"acc_norm_stderr,none": 0.006273624021118776,
						"acc_stderr,none": 0.006273624021118776,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.114,
						"acc_norm,none": 0.114,
						"acc_norm_stderr,none": 0.010055103435823333,
						"acc_stderr,none": 0.010055103435823333,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.02752568467055655,
						"acc_stderr,none": 0.02752568467055655,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.010845350230472986,
						"acc_stderr,none": 0.010845350230472986,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.154,
						"acc_norm,none": 0.154,
						"acc_norm_stderr,none": 0.011419913065098706,
						"acc_stderr,none": 0.011419913065098706,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.026,
						"acc_norm,none": 0.026,
						"acc_norm_stderr,none": 0.0050348137353182195,
						"acc_stderr,none": 0.0050348137353182195,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.47226485419864067,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.0004888096192384763,
						"acc_stderr,none": 0.042406016547196064,
						"alias": "kobest",
						"f1,none": 0.3611138434607754,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.0157926694516289,
						"alias": " - kobest_copa",
						"f1,none": 0.46972681408060757,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.326,
						"acc_norm,none": 0.422,
						"acc_norm_stderr,none": 0.02210903931061855,
						"acc_stderr,none": 0.020984009562393567,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.32340732857201415,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5037783375314862,
						"acc_stderr,none": 0.025125227983562776,
						"alias": " - kobest_sentineg",
						"f1,none": 0.33500837520938026,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.09596351639821463,
						"acc_stderr,none": 0.006615671208021231,
						"alias": "lambada",
						"perplexity,none": 2685.506801586333,
						"perplexity_stderr,none": 328.29262962059437
					},
					"lambada_cloze": {
						"acc,none": 0.015330875218319426,
						"acc_stderr,none": 0.006159478702424399,
						"alias": "lambada_cloze",
						"perplexity,none": 45681.71788121615,
						"perplexity_stderr,none": 8761.14358133287
					},
					"lambada_multilingual": {
						"acc,none": 0.05344459538133126,
						"acc_stderr,none": 0.015997797491782328,
						"alias": "lambada_multilingual",
						"perplexity,none": 604484.5857821594,
						"perplexity_stderr,none": 264539.52930330223
					},
					"lambada_openai": {
						"acc,none": 0.10576363283524161,
						"acc_stderr,none": 0.004284561852149176,
						"alias": " - lambada_openai",
						"perplexity,none": 2122.0844159549133,
						"perplexity_stderr,none": 134.23442502928177
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.027168639627401514,
						"acc_stderr,none": 0.002264982237403286,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 28875.445340838723,
						"perplexity_stderr,none": 1667.3469626267367
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.02910925674364448,
						"acc_stderr,none": 0.0023421397849860037,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 682813.4391532716,
						"perplexity_stderr,none": 69505.83748105535
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.10576363283524161,
						"acc_stderr,none": 0.004284561852149178,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 2122.3272942513076,
						"perplexity_stderr,none": 134.25944084761983
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.03842421890161071,
						"acc_stderr,none": 0.0026779739919852753,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 1270992.4946002422,
						"perplexity_stderr,none": 123123.89922075765
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.052784785561808656,
						"acc_stderr,none": 0.00311523480388682,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 236573.49296881474,
						"perplexity_stderr,none": 20803.390760438833
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.041141082864350864,
						"acc_stderr,none": 0.002767115504505753,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 829921.174894217,
						"perplexity_stderr,none": 77076.55086051516
					},
					"lambada_standard": {
						"acc,none": 0.08558121482631477,
						"acc_stderr,none": 0.003897389995399007,
						"alias": " - lambada_standard",
						"perplexity,none": 3248.5146377011006,
						"perplexity_stderr,none": 197.55332813127168
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0034931108092373375,
						"acc_stderr,none": 0.0008219746177035982,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 62487.99042159358,
						"perplexity_stderr,none": 3081.7716184838923
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23155216284987276,
						"exact_match_stderr,get-answer": 0.010642496713710918
					},
					"logiqa": {
						"acc,none": 0.21658986175115208,
						"acc_norm,none": 0.27342549923195086,
						"acc_norm_stderr,none": 0.01748247454768128,
						"acc_stderr,none": 0.016156860583178303,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24236641221374045,
						"acc_norm,none": 0.2684478371501272,
						"acc_norm_stderr,none": 0.011180584582096646,
						"acc_stderr,none": 0.010811295412400647,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.240536013400335,
						"acc_norm,none": 0.24723618090452262,
						"acc_norm_stderr,none": 0.007897433402182866,
						"acc_stderr,none": 0.007824277362109033,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4841135352679517,
						"acc_stderr,none": 0.005143297913231224,
						"alias": "mc_taco",
						"f1,none": 0.42320899940793366,
						"f1_stderr,none": 0.006748038915526225
					},
					"medmcqa": {
						"acc,none": 0.2514941429595984,
						"acc_norm,none": 0.2514941429595984,
						"acc_norm_stderr,none": 0.006709181254650129,
						"acc_stderr,none": 0.006709181254650129,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.22388059701492538,
						"acc_norm,none": 0.22388059701492538,
						"acc_norm_stderr,none": 0.01168769696955638,
						"acc_stderr,none": 0.01168769696955638,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2519584104828372,
						"acc_stderr,none": 0.03696200615821548,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2740740740740741,
						"acc_stderr,none": 0.03853254836552003,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19736842105263158,
						"acc_stderr,none": 0.03238981601699397,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2528301886792453,
						"acc_stderr,none": 0.026749899771241235,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03942082639927213,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.041633319989322695,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932268,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2023121387283237,
						"acc_stderr,none": 0.03063114553919882,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.04158307533083286,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3021276595744681,
						"acc_stderr,none": 0.03001755447188055,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.0383515395439942,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.23448275862068965,
						"acc_stderr,none": 0.035306258743465914,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2671957671957672,
						"acc_stderr,none": 0.022789673145776578,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.038522733649243183,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.25161290322580643,
						"acc_stderr,none": 0.024685979286239956,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.030108330718011625,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036624,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.28484848484848485,
						"acc_stderr,none": 0.035243908445117836,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.031911782267135466,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.23316062176165803,
						"acc_stderr,none": 0.030516111371476008,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24871794871794872,
						"acc_stderr,none": 0.0219169577092138,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.026466117538959912,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.22268907563025211,
						"acc_stderr,none": 0.02702543349888237,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2052980132450331,
						"acc_stderr,none": 0.03297986648473837,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24220183486238533,
						"acc_stderr,none": 0.01836817630659862,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.18055555555555555,
						"acc_stderr,none": 0.026232878971491666,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.029102254389674082,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.24050632911392406,
						"acc_stderr,none": 0.027820781981149675,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3273542600896861,
						"acc_stderr,none": 0.031493846709941306,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.24427480916030533,
						"acc_stderr,none": 0.037683359597287434,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.251009564293305,
						"acc_stderr,none": 0.02560891633259949,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.23148148148148148,
						"acc_stderr,none": 0.04077494709252627,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.24539877300613497,
						"acc_stderr,none": 0.03380939813943354,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04109974682633932,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384495,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.24358974358974358,
						"acc_stderr,none": 0.028120966503914407,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2950191570881226,
						"acc_stderr,none": 0.016308363772932724,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.27167630057803466,
						"acc_stderr,none": 0.023948512905468348,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2435754189944134,
						"acc_stderr,none": 0.014355911964767864,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.024288619466046123,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.26874798841326036,
						"acc_stderr,none": 0.040250632694020225,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.26688102893890675,
						"acc_stderr,none": 0.025122637608816646,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.23148148148148148,
						"acc_stderr,none": 0.02346842983245116,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343957,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2561929595827901,
						"acc_stderr,none": 0.011149173153110582,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.025767252010855952,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.017848089574913222,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.04350271442923243,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866767,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24666883327916803,
						"acc_stderr,none": 0.03167665564890943,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23880597014925373,
						"acc_stderr,none": 0.030147775935409217,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24199175388518868,
						"acc_stderr,none": 0.04810567086298941,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.26506024096385544,
						"acc_stderr,none": 0.03436024037944967,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.23391812865497075,
						"acc_stderr,none": 0.03246721765117825,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.39765664798777384,
						"acc_stderr,none": 0.0049402981263918485,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.40703824247355574,
						"acc_stderr,none": 0.004954868141845814,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5318627450980392,
						"acc_stderr,none": 0.024733705353784115,
						"alias": "mrpc",
						"f1,none": 0.6570915619389587,
						"f1_stderr,none": 0.023190186868042292
					},
					"multimedqa": {
						"acc,none": 0.26430092264017035,
						"acc_norm,none": 0.2403285730488725,
						"acc_norm_stderr,none": 9.966281611173883e-05,
						"acc_stderr,none": 0.0633511490625171,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.4808168316831683,
						"acc_stderr,none": 0.007176515439298641,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6007336365988238,
						"mrr_stderr,none": 0.010209990473705653,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.445823927765237,
						"r@2_stderr,none": 0.016708364331702853
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.5903875114686333,
						"mrr_stderr,none": 0.010222248566131447,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4785553047404063,
						"r@2_stderr,none": 0.016791850493288407
					},
					"openbookqa": {
						"acc,none": 0.13,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.019384310743640384,
						"acc_stderr,none": 0.015055009352810986,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.011177761232603322,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.011145474902641254,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.011147292544180011,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.011177761232603322,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5535,
						"acc_stderr,none": 0.01111893386729012,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5475,
						"acc_stderr,none": 0.011132557743886098,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.011183136021064612,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.49842857142857144,
						"acc_stderr,none": 0.028480163371254853,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.5642002176278563,
						"acc_norm,none": 0.55930359085963,
						"acc_norm_stderr,none": 0.011583478090657129,
						"acc_stderr,none": 0.011569259195486618,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24455593509820667,
						"acc_norm,none": 0.28309137489325364,
						"acc_norm_stderr,none": 0.0032913068437153263,
						"acc_stderr,none": 0.0031402441238755517,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.5895999175269849,
						"acc_norm,none": 0.31729613914072274,
						"acc_norm_stderr,none": 0.003787079937645793,
						"acc_stderr,none": 0.18426968914343322,
						"alias": "pythia",
						"bits_per_byte,none": 1.5594074450869237,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.9473276366452805,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 2122.0844159549133,
						"perplexity_stderr,none": 134.23442502928177,
						"word_perplexity,none": 323.77647124335505,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.25354609929078015,
						"acc_norm,none": 0.2978723404255319,
						"acc_norm_stderr,none": 0.03532668733253244,
						"acc_stderr,none": 0.03736129272535806,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.19166666666666668,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.04200840252084027,
						"acc_stderr,none": 0.03608237023316683,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.23125,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03343758265727745,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.29225352112676056,
						"acc_norm,none": 0.31690140845070425,
						"acc_norm_stderr,none": 0.02765734975848418,
						"acc_stderr,none": 0.02703494666251675,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5205930807248764,
						"acc_stderr,none": 0.006759670033729402,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.44889933217907496,
						"acc_stderr,none": 0.0024736796063615945,
						"alias": "qqp",
						"f1,none": 0.49468169550722335,
						"f1_stderr,none": 0.002924265018373409
					},
					"race": {
						"acc,none": 0.261244019138756,
						"acc_stderr,none": 0.013596384379756748,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5306859205776173,
						"acc_stderr,none": 0.03003973059219781,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.707,
						"acc_norm,none": 0.647,
						"acc_norm_stderr,none": 0.015120172605483699,
						"acc_stderr,none": 0.014399942998441276,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5342960288808665,
						"acc_stderr,none": 0.030025579819366426,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5401376146788991,
						"acc_stderr,none": 0.016887177857495637,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.3390482855143457,
						"acc_norm,none": 0.3954313705888233,
						"acc_norm_stderr,none": 0.0034569180662148333,
						"acc_stderr,none": 0.003346927964343634,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.4558583740973678,
						"acc_stderr,none": 0.03512410550577622,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.005004255426437999,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.3656633221850613,
						"acc_stderr,none": 0.004848752945663641,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.4999019607843137,
						"acc_stderr,none": 0.004950980320775898,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3541550349800609,
						"acc_stderr,none": 0.06046304832478659,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.0002590774867436336,
						"bleu_diff,none": -3.058967803850556,
						"bleu_diff_stderr,none": 0.21510156755834858,
						"bleu_max,none": 15.376870587650536,
						"bleu_max_stderr,none": 0.3363566486620398,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.00025486209819011995,
						"rouge1_diff,none": -5.235627921489654,
						"rouge1_diff_stderr,none": 0.3530098275098548,
						"rouge1_max,none": 38.26302707107735,
						"rouge1_max_stderr,none": 0.5606026731446057,
						"rouge2_acc,none": 0.2141982864137087,
						"rouge2_acc_stderr,none": 0.0002062712996460027,
						"rouge2_diff,none": -5.338754231297163,
						"rouge2_diff_stderr,none": 0.40032250880397335,
						"rouge2_max,none": 21.28898467311369,
						"rouge2_max_stderr,none": 0.6258049586978924,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.0002485279995361043,
						"rougeL_diff,none": -5.223026841958686,
						"rougeL_diff_stderr,none": 0.3253340484774495,
						"rougeL_max,none": 35.181796595837234,
						"rougeL_max_stderr,none": 0.5587965905722869
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386854,
						"bleu_diff,none": -3.058967803850556,
						"bleu_diff_stderr,none": 0.46379043495780353,
						"bleu_max,none": 15.376870587650536,
						"bleu_max_stderr,none": 0.5799626269528406,
						"rouge1_acc,none": 0.29498164014687883,
						"rouge1_acc_stderr,none": 0.015964400965589657,
						"rouge1_diff,none": -5.235627921489654,
						"rouge1_diff_stderr,none": 0.594146301435812,
						"rouge1_max,none": 38.26302707107735,
						"rouge1_max_stderr,none": 0.7487340470050802,
						"rouge2_acc,none": 0.2141982864137087,
						"rouge2_acc_stderr,none": 0.014362148155690454,
						"rouge2_diff,none": -5.338754231297163,
						"rouge2_diff_stderr,none": 0.6327104462579809,
						"rouge2_max,none": 21.28898467311369,
						"rouge2_max_stderr,none": 0.7910783518071345,
						"rougeL_acc,none": 0.2827417380660955,
						"rougeL_acc_stderr,none": 0.015764770836777308,
						"rougeL_diff,none": -5.223026841958686,
						"rougeL_diff_stderr,none": 0.570380617199997,
						"rougeL_max,none": 35.181796595837234,
						"rougeL_max_stderr,none": 0.7475269831733747
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23011015911872704,
						"acc_stderr,none": 0.014734557959807763,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4161774729107278,
						"acc_stderr,none": 0.01562427359572552,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.002952755905511811,
						"exact_match_stderr,none": 0.0012039728135357858
					},
					"wic": {
						"acc,none": 0.5047021943573667,
						"acc_stderr,none": 0.01980984521925977,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 1.5594074450869237,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.9473276366452805,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 323.77647124335505,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.4996053670086819,
						"acc_stderr,none": 0.014052481306049516,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.059613057849722415,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.4230769230769231,
						"acc_stderr,none": 0.04867993747918684,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.5018315018315018,
						"acc_stderr,none": 0.030316749738547345,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5176363636363636,
						"acc_stderr,none": 0.02961243699532362,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.02236516042423134,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.02238020883492804,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914167,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143025,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920788,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044303,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.34125836680053545,
						"acc_stderr,none": 0.019307682076867023,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3285140562248996,
						"acc_stderr,none": 0.009414190734131758,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3317269076305221,
						"acc_stderr,none": 0.009437454900329122,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177136,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.009443193365903336,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.40321285140562246,
						"acc_stderr,none": 0.009832511560868071,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3257028112449799,
						"acc_stderr,none": 0.009393425164263997,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.009663601903728029,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667053,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3433734939759036,
						"acc_stderr,none": 0.009517658993060703,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177138,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3365461847389558,
						"acc_stderr,none": 0.009471423054177128,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3437751004016064,
						"acc_stderr,none": 0.009520310502882934,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667053,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3305220883534137,
						"acc_stderr,none": 0.009428789109289824,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.48571084772276035,
						"acc_stderr,none": 0.01917668870742635,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.01284769827038822,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.5241561879549967,
						"acc_stderr,none": 0.012852100057309607,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.49702183984116477,
						"acc_stderr,none": 0.012866897066011239,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4943745863666446,
						"acc_stderr,none": 0.012866310923072518,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.47121111846459296,
						"acc_stderr,none": 0.01284577907071948,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.45400397088021177,
						"acc_stderr,none": 0.012812565368728938,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.48444738583719393,
						"acc_stderr,none": 0.01286089911147079,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4804765056254136,
						"acc_stderr,none": 0.012857312531836862,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.499669093315685,
						"acc_stderr,none": 0.012867122498493415,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.4818001323626737,
						"acc_stderr,none": 0.012858598401831846,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.5183187233086087,
						"acc_stderr,none": 0.02177253452137737,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.5384946236559139,
						"acc_stderr,none": 0.010340963526502454,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.05517968347010931,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.49635036496350365,
						"acc_stderr,none": 0.016153836393684517,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.49809885931558934,
						"acc_stderr,none": 0.030889879865535992,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5015873015873016,
						"acc_stderr,none": 0.02821649021370027,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.49404761904761907,
						"acc_stderr,none": 0.0222923182643797,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "microsoft/phi-1"
	},
	"microsoft/phi-1_5": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.656989853438557,
						"acc_norm,none": 0.6471251409244645,
						"acc_norm_stderr,none": 0.04066559105017589,
						"acc_stderr,none": 0.0495439909286596,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3175,
						"acc_stderr,none": 0.01995201706221973,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.15315,
						"acc_stderr,none": 0.1809240277347551,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.7898358208955224,
						"acc_stderr,none": 0.1512850864363504,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24962852897473997,
						"acc_norm,none": 0.24962852897473997,
						"acc_norm_stderr,none": 0.1076499458752758,
						"acc_stderr,none": 0.1076499458752758,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.262303574512174,
						"acc_norm,none": 0.262303574512174,
						"acc_norm_stderr,none": 0.0446058504119728,
						"acc_stderr,none": 0.0446058504119728,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 5.402094309573097,
						"likelihood_diff_stderr,none": 1.0042893915957558,
						"pct_stereotype,none": 0.5256410256410257,
						"pct_stereotype_stderr,none": 0.08691435651372321
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0172244094488189,
						"exact_match_stderr,none": 0.0028869840818920704
					},
					"glue": {
						"acc,none": 0.5009835464592854,
						"acc_stderr,none": 0.03353312543236152,
						"alias": "glue",
						"f1,none": 0.3909623716904163,
						"f1_stderr,none": 2.6934101172870805e-05,
						"mcc,none": 0.0005119811361117536,
						"mcc_stderr,none": 0.0009530742287624148
					},
					"kmmlu": {
						"acc,none": 0.10023101357204736,
						"acc_norm,none": 0.10023101357204736,
						"acc_norm_stderr,none": 0.06394552161681331,
						"acc_stderr,none": 0.06394552161681331,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.432,
						"acc_norm_stderr,none": 0.0004917354709418823,
						"acc_stderr,none": 0.04318103022327812,
						"alias": "kobest",
						"f1,none": 0.38759233902334245,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.4364447894430429,
						"acc_stderr,none": 0.045630131502273276,
						"alias": "lambada",
						"perplexity,none": 21.959680429510197,
						"perplexity_stderr,none": 6.584497341384194
					},
					"lambada_cloze": {
						"acc,none": 0.1342907044440132,
						"acc_stderr,none": 0.03591374323110126,
						"alias": "lambada_cloze",
						"perplexity,none": 271.50389603631925,
						"perplexity_stderr,none": 83.7389450632914
					},
					"lambada_multilingual": {
						"acc,none": 0.2034542984669125,
						"acc_stderr,none": 0.09840718188692064,
						"alias": "lambada_multilingual",
						"perplexity,none": 8998.898217246948,
						"perplexity_stderr,none": 6694.966618108296
					},
					"mmlu": {
						"acc,none": 0.4156103119213787,
						"acc_stderr,none": 0.09580239912197347,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3834218916046759,
						"acc_stderr,none": 0.09980914366380608,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.456710653363373,
						"acc_stderr,none": 0.08819785463063003,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4874878128046798,
						"acc_stderr,none": 0.08668149918781964,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.35299714557564227,
						"acc_stderr,none": 0.07260160225767888,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.36990773598296667,
						"acc_norm,none": 0.33304231926291905,
						"acc_norm_stderr,none": 0.00013082869092342827,
						"acc_stderr,none": 0.08369341742374357,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48364285714285715,
						"acc_stderr,none": 0.03742491672433065,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7112002692685127,
						"acc_norm,none": 0.6498530476108814,
						"acc_norm_stderr,none": 0.0035303023483130268,
						"acc_stderr,none": 0.14901000015478819,
						"alias": "pythia",
						"bits_per_byte,none": 0.9100384399401059,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.8790955653906523,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 8.952081666500574,
						"perplexity_stderr,none": 0.29940013941158355,
						"word_perplexity,none": 29.169437028524897,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.05314683784813257,
						"acc_stderr,none": 0.03643101455378746,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5724934278393398,
						"acc_stderr,none": 0.043529050370286135,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.33768559231342077,
						"acc_stderr,none": 0.0014856093902889046,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.01640398946990783,
						"bleu_diff,none": -4.6115065977579235,
						"bleu_diff_stderr,none": 0.8901793950229382,
						"bleu_max,none": 28.188061252436995,
						"bleu_max_stderr,none": 0.7905622059974436,
						"rouge1_acc,none": 0.33047735618115054,
						"rouge1_acc_stderr,none": 0.016466769613698303,
						"rouge1_diff,none": -4.875337395009971,
						"rouge1_diff_stderr,none": 1.0314030062890773,
						"rouge1_max,none": 53.54443904556872,
						"rouge1_max_stderr,none": 0.8354753910839761,
						"rouge2_acc,none": 0.2876376988984088,
						"rouge2_acc_stderr,none": 0.015846315101394805,
						"rouge2_diff,none": -6.262800072222912,
						"rouge2_diff_stderr,none": 1.2035260865490716,
						"rouge2_max,none": 38.764281938826,
						"rouge2_max_stderr,none": 0.9852926715679251,
						"rougeL_acc,none": 0.31946144430844553,
						"rougeL_acc_stderr,none": 0.0163226441829605,
						"rougeL_diff,none": -5.1087067052813175,
						"rougeL_diff_stderr,none": 1.0442388598358738,
						"rougeL_max,none": 50.952837351048444,
						"rougeL_max_stderr,none": 0.862823336910652
					},
					"xcopa": {
						"acc,none": 0.5216363636363637,
						"acc_stderr,none": 0.02619795891999274,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3557429718875502,
						"acc_stderr,none": 0.0470955966598085,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5175982191203898,
						"acc_stderr,none": 0.07780934553349826,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6792537648909868,
						"acc_stderr,none": 0.0899142305270184,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.656989853438557,
						"acc_norm,none": 0.6471251409244645,
						"acc_norm_stderr,none": 0.04066559105017589,
						"acc_stderr,none": 0.0495439909286596,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3175,
						"acc_stderr,none": 0.01995201706221973,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.281,
						"acc_stderr,none": 0.014221154708434925,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.327,
						"acc_stderr,none": 0.014842213153411249,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.01368049572576779,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.44880546075085326,
						"acc_norm,none": 0.4786689419795222,
						"acc_norm_stderr,none": 0.014598087973127106,
						"acc_stderr,none": 0.014534599585097674,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7596801346801347,
						"acc_norm,none": 0.7302188552188552,
						"acc_norm_stderr,none": 0.009107527914671064,
						"acc_stderr,none": 0.008767553284156914,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.15315,
						"acc_stderr,none": 0.1809240277347551,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0855,
						"acc_stderr,none": 0.006254153197364767,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.5505,
						"acc_stderr,none": 0.011125950223877364,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.2515,
						"acc_stderr,none": 0.009704172323296926,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.011085280407858918,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0355,
						"acc_stderr,none": 0.004138651860160541,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.04,
						"acc_stderr,none": 0.0043828763161195125,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521493,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339453,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0017353579175704988,
						"acc_stderr,none": 0.0008671138796248142,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.7898358208955224,
						"acc_stderr,none": 0.1512850864363504,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343958,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426513,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426557,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750636,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703718,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.013946271849440469,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.569,
						"acc_stderr,none": 0.01566794448817351,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.014899597242811488,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646660002,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513064836,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118794,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.01005510343582333,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557414,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785136,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163035,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753651,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541656,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632161,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412794,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727057,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.282,
						"acc_stderr,none": 0.014236526215291336,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946095,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.803,
						"acc_stderr,none": 0.012583693787968113,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.673,
						"acc_stderr,none": 0.014842213153411242,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938034,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.0066959566781630364,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.799,
						"acc_stderr,none": 0.012679107214617324,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653902,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098708,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.463,
						"acc_stderr,none": 0.015775927227262416,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613614,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.015543249100255544,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.401,
						"acc_stderr,none": 0.015506109745498325,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.519,
						"acc_stderr,none": 0.01580787426850585,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240629,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.01159890229868901,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942295,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795025,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491122,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099818,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377937,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.553,
						"acc_stderr,none": 0.01573017604600907,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357796,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.01049924922240804,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.015204840912919501,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.015701131345400774,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.0102068692643818,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732951,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.014174516461485265,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.01162816469672718,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122377,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950443,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.819,
						"acc_stderr,none": 0.012181436179177905,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946094,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.82,
						"acc_stderr,none": 0.012155153135511963,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333333,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345684,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.468,
						"acc_stderr,none": 0.01578686875935901,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7474006116207951,
						"acc_stderr,none": 0.007599506862204161,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.6428571428571429,
						"acc_stderr,none": 0.06460957383809221,
						"alias": "cb",
						"f1,none": 0.47879763821792803,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24962852897473997,
						"acc_norm,none": 0.24962852897473997,
						"acc_norm_stderr,none": 0.1076499458752758,
						"acc_stderr,none": 0.1076499458752758,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275463,
						"acc_stderr,none": 0.08124094920275463,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2127659574468085,
						"acc_norm,none": 0.2127659574468085,
						"acc_norm_stderr,none": 0.06034260964773521,
						"acc_stderr,none": 0.06034260964773521,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659811,
						"acc_stderr,none": 0.09829463743659811,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.0625,
						"acc_norm,none": 0.0625,
						"acc_norm_stderr,none": 0.0625,
						"acc_stderr,none": 0.0625,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.08086923723833501,
						"acc_stderr,none": 0.08086923723833501,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3548387096774194,
						"acc_norm,none": 0.3548387096774194,
						"acc_norm_stderr,none": 0.08735525166275225,
						"acc_stderr,none": 0.08735525166275225,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522107,
						"acc_stderr,none": 0.10101525445522107,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.13793103448275862,
						"acc_norm,none": 0.13793103448275862,
						"acc_norm_stderr,none": 0.06516628844986677,
						"acc_stderr,none": 0.06516628844986677,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141224,
						"acc_stderr,none": 0.06372446937141224,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.061487546190134544,
						"acc_stderr,none": 0.061487546190134544,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.262303574512174,
						"acc_norm,none": 0.262303574512174,
						"acc_norm_stderr,none": 0.0446058504119728,
						"acc_stderr,none": 0.0446058504119728,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0353866849031339,
						"acc_stderr,none": 0.0353866849031339,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3014354066985646,
						"acc_norm,none": 0.3014354066985646,
						"acc_norm_stderr,none": 0.031817697534233615,
						"acc_stderr,none": 0.031817697534233615,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.22900763358778625,
						"acc_norm,none": 0.22900763358778625,
						"acc_norm_stderr,none": 0.036853466317118506,
						"acc_stderr,none": 0.036853466317118506,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.037970424962817856,
						"acc_stderr,none": 0.037970424962817856,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728109,
						"acc_stderr,none": 0.04269291915728109,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.024539600216850282,
						"acc_stderr,none": 0.024539600216850282,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923393,
						"acc_stderr,none": 0.030964517926923393,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.20670391061452514,
						"acc_norm,none": 0.20670391061452514,
						"acc_norm_stderr,none": 0.030351628795046437,
						"acc_stderr,none": 0.030351628795046437,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149678,
						"acc_stderr,none": 0.027820781981149678,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.1792452830188679,
						"acc_norm,none": 0.1792452830188679,
						"acc_norm_stderr,none": 0.037431386312552786,
						"acc_stderr,none": 0.037431386312552786,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.35514018691588783,
						"acc_norm,none": 0.35514018691588783,
						"acc_norm_stderr,none": 0.04648144634449115,
						"acc_stderr,none": 0.04648144634449115,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980981,
						"acc_stderr,none": 0.03957835471980981,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.03990657150993185,
						"acc_stderr,none": 0.03990657150993185,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.027984879811884505,
						"acc_stderr,none": 0.027984879811884505,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.031885780176863984,
						"acc_stderr,none": 0.031885780176863984,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.038142800826175154,
						"acc_stderr,none": 0.038142800826175154,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2733812949640288,
						"acc_norm,none": 0.2733812949640288,
						"acc_norm_stderr,none": 0.0379400712153362,
						"acc_stderr,none": 0.0379400712153362,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27672955974842767,
						"acc_norm,none": 0.27672955974842767,
						"acc_norm_stderr,none": 0.03559177035707935,
						"acc_stderr,none": 0.03559177035707935,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2822085889570552,
						"acc_norm,none": 0.2822085889570552,
						"acc_norm_stderr,none": 0.03536117886664743,
						"acc_stderr,none": 0.03536117886664743,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.31976744186046513,
						"acc_norm,none": 0.31976744186046513,
						"acc_norm_stderr,none": 0.03566545538084812,
						"acc_stderr,none": 0.03566545538084812,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.02656913773613355,
						"acc_stderr,none": 0.02656913773613355,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.32323232323232326,
						"acc_norm,none": 0.32323232323232326,
						"acc_norm_stderr,none": 0.03332299921070643,
						"acc_stderr,none": 0.03332299921070643,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.028657491285071987,
						"acc_stderr,none": 0.028657491285071987,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.28695652173913044,
						"acc_norm,none": 0.28695652173913044,
						"acc_norm_stderr,none": 0.02989154167363546,
						"acc_stderr,none": 0.02989154167363546,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066654,
						"acc_stderr,none": 0.03785714465066654,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.037068604626235596,
						"acc_stderr,none": 0.037068604626235596,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.035208939510976534,
						"acc_stderr,none": 0.035208939510976534,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.22033898305084745,
						"acc_norm,none": 0.22033898305084745,
						"acc_norm_stderr,none": 0.03831824849223319,
						"acc_stderr,none": 0.03831824849223319,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.20909090909090908,
						"acc_norm,none": 0.20909090909090908,
						"acc_norm_stderr,none": 0.03895091015724138,
						"acc_stderr,none": 0.03895091015724138,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.037667638895398536,
						"acc_stderr,none": 0.037667638895398536,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.037184890068181146,
						"acc_stderr,none": 0.037184890068181146,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697951,
						"acc_stderr,none": 0.03162930395697951,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.22674418604651161,
						"acc_norm,none": 0.22674418604651161,
						"acc_norm_stderr,none": 0.03202075899584939,
						"acc_stderr,none": 0.03202075899584939,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25790754257907544,
						"acc_norm,none": 0.25790754257907544,
						"acc_norm_stderr,none": 0.021605737836583264,
						"acc_stderr,none": 0.021605737836583264,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.03011750436185039,
						"acc_stderr,none": 0.03011750436185039,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.04049015460622492,
						"acc_stderr,none": 0.04049015460622492,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2459016393442623,
						"acc_norm,none": 0.2459016393442623,
						"acc_norm_stderr,none": 0.039147319035957334,
						"acc_stderr,none": 0.039147319035957334,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.03092739584327577,
						"acc_stderr,none": 0.03092739584327577,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736877,
						"acc_stderr,none": 0.03305282343736877,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2751322751322751,
						"acc_norm,none": 0.2751322751322751,
						"acc_norm_stderr,none": 0.03257026008630314,
						"acc_stderr,none": 0.03257026008630314,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.04280254792505459,
						"acc_stderr,none": 0.04280254792505459,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.036951833116502325,
						"acc_stderr,none": 0.036951833116502325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752742,
						"acc_stderr,none": 0.03424737867752742,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2796208530805687,
						"acc_norm,none": 0.2796208530805687,
						"acc_norm_stderr,none": 0.030971033440870904,
						"acc_stderr,none": 0.030971033440870904,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.22340425531914893,
						"acc_norm,none": 0.22340425531914893,
						"acc_norm_stderr,none": 0.02150936503165975,
						"acc_stderr,none": 0.02150936503165975,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3146551724137931,
						"acc_norm,none": 0.3146551724137931,
						"acc_norm_stderr,none": 0.030553855290356806,
						"acc_stderr,none": 0.030553855290356806,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2988505747126437,
						"acc_norm,none": 0.2988505747126437,
						"acc_norm_stderr,none": 0.03480240745663784,
						"acc_stderr,none": 0.03480240745663784,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26991150442477874,
						"acc_norm,none": 0.26991150442477874,
						"acc_norm_stderr,none": 0.029594239995417385,
						"acc_stderr,none": 0.029594239995417385,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.029488391230979384,
						"acc_stderr,none": 0.029488391230979384,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.22485207100591717,
						"acc_norm,none": 0.22485207100591717,
						"acc_norm_stderr,none": 0.03220965704514524,
						"acc_stderr,none": 0.03220965704514524,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.03523168397737091,
						"acc_stderr,none": 0.03523168397737091,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.0005119811361117536,
						"mcc_stderr,none": 0.030871900310191706
					},
					"copa": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.040201512610368445,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 5.402094309573097,
						"likelihood_diff_stderr,none": 1.0042893915957558,
						"pct_stereotype,none": 0.5256410256410257,
						"pct_stereotype_stderr,none": 0.08691435651372321
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.63366110295004,
						"likelihood_diff_stderr,none": 0.11657221727213989,
						"pct_stereotype,none": 0.592128801431127,
						"pct_stereotype_stderr,none": 0.012004182941077534
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.5204954252138245,
						"likelihood_diff_stderr,none": 0.43776950484453325,
						"pct_stereotype,none": 0.6483516483516484,
						"pct_stereotype_stderr,none": 0.05033132318627889
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 6.1614203019575635,
						"likelihood_diff_stderr,none": 1.7872730994253616,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 7.581114607590895,
						"likelihood_diff_stderr,none": 0.7957553179975441,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.5340149819850923,
						"likelihood_diff_stderr,none": 0.29200008102337444,
						"pct_stereotype,none": 0.653125,
						"pct_stereotype_stderr,none": 0.026649515182883866
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.441057372976233,
						"likelihood_diff_stderr,none": 0.28095619580498976,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.03398110890294636
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.230291737450494,
						"likelihood_diff_stderr,none": 0.40343443016625463,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.05913268547421809
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.609668063366507,
						"likelihood_diff_stderr,none": 0.1917509568965443,
						"pct_stereotype,none": 0.49015748031496065,
						"pct_stereotype_stderr,none": 0.022201476788942617
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.760265178508587,
						"likelihood_diff_stderr,none": 0.5276759906442954,
						"pct_stereotype,none": 0.6486486486486487,
						"pct_stereotype_stderr,none": 0.04551758693625319
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 6.416046798870128,
						"likelihood_diff_stderr,none": 0.6060015486540626,
						"pct_stereotype,none": 0.8172043010752689,
						"pct_stereotype_stderr,none": 0.04029530010615515
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.954658889770508,
						"likelihood_diff_stderr,none": 0.29108411375282905,
						"pct_stereotype,none": 0.6368421052631579,
						"pct_stereotype_stderr,none": 0.03498104083833201
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 6.167052155814856,
						"likelihood_diff_stderr,none": 0.14833841240196932,
						"pct_stereotype,none": 0.45796064400715564,
						"pct_stereotype_stderr,none": 0.012170053344890804
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 5.415813446044922,
						"likelihood_diff_stderr,none": 0.5355680648902945,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.917292961707482,
						"likelihood_diff_stderr,none": 1.3786158584518928,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 7.658457438151042,
						"likelihood_diff_stderr,none": 0.8891428642269054,
						"pct_stereotype,none": 0.4090909090909091,
						"pct_stereotype_stderr,none": 0.060983672113630656
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 4.723585752683265,
						"likelihood_diff_stderr,none": 0.25767290849021546,
						"pct_stereotype,none": 0.5482866043613707,
						"pct_stereotype_stderr,none": 0.0278202042048158
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 8.947603203091226,
						"likelihood_diff_stderr,none": 0.4528754143651774,
						"pct_stereotype,none": 0.2924901185770751,
						"pct_stereotype_stderr,none": 0.02865639690849427
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 6.409296247694227,
						"likelihood_diff_stderr,none": 0.8061832154301538,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05897165471491952
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 5.499456737352454,
						"likelihood_diff_stderr,none": 0.25738923062719915,
						"pct_stereotype,none": 0.3804347826086957,
						"pct_stereotype_stderr,none": 0.022660906553299328
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 7.154560221796451,
						"likelihood_diff_stderr,none": 0.6137848142093544,
						"pct_stereotype,none": 0.391304347826087,
						"pct_stereotype_stderr,none": 0.04570934635111714
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 7.0168393313229735,
						"likelihood_diff_stderr,none": 0.6857192284257431,
						"pct_stereotype,none": 0.7472527472527473,
						"pct_stereotype_stderr,none": 0.04580951853732891
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.4095903708010304,
						"likelihood_diff_stderr,none": 0.41507578256341376,
						"pct_stereotype,none": 0.5510204081632653,
						"pct_stereotype_stderr,none": 0.03561884533975955
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0172244094488189,
						"exact_match_stderr,none": 0.0028869840818920704
					},
					"glue": {
						"acc,none": 0.5009835464592854,
						"acc_stderr,none": 0.03353312543236152,
						"alias": "glue",
						"f1,none": 0.3909623716904163,
						"f1_stderr,none": 2.6934101172870805e-05,
						"mcc,none": 0.0005119811361117536,
						"mcc_stderr,none": 0.0009530742287624148
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.3070507960576194,
						"exact_match_stderr,get-answer": 0.0127056857231317
					},
					"hellaswag": {
						"acc,none": 0.4795857398924517,
						"acc_norm,none": 0.6260705038836885,
						"acc_norm_stderr,none": 0.004828564090620289,
						"acc_stderr,none": 0.004985620773683443,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.10023101357204736,
						"acc_norm,none": 0.10023101357204736,
						"acc_norm_stderr,none": 0.06394552161681331,
						"acc_stderr,none": 0.06394552161681331,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.094,
						"acc_norm,none": 0.094,
						"acc_norm_stderr,none": 0.009233052000787735,
						"acc_stderr,none": 0.009233052000787735,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.082,
						"acc_norm,none": 0.082,
						"acc_norm_stderr,none": 0.00868051561552369,
						"acc_stderr,none": 0.00868051561552369,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968135,
						"acc_stderr,none": 0.012583693787968135,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.192,
						"acc_norm,none": 0.192,
						"acc_norm_stderr,none": 0.012461592646659967,
						"acc_stderr,none": 0.012461592646659967,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.01552503498177411,
						"acc_stderr,none": 0.01552503498177411,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.01,
						"acc_norm,none": 0.01,
						"acc_norm_stderr,none": 0.003148000938676774,
						"acc_stderr,none": 0.003148000938676774,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.008,
						"acc_norm,none": 0.008,
						"acc_norm_stderr,none": 0.0028185003005045074,
						"acc_stderr,none": 0.0028185003005045074,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178373,
						"acc_stderr,none": 0.004429403980178373,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.04,
						"acc_norm,none": 0.04,
						"acc_norm_stderr,none": 0.006199874066337038,
						"acc_stderr,none": 0.006199874066337038,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910613,
						"acc_stderr,none": 0.004319451082910613,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.017,
						"acc_norm,none": 0.017,
						"acc_norm_stderr,none": 0.004089954489689086,
						"acc_stderr,none": 0.004089954489689086,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.179,
						"acc_norm,none": 0.179,
						"acc_norm_stderr,none": 0.012128730605719118,
						"acc_stderr,none": 0.012128730605719118,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.031,
						"acc_norm,none": 0.031,
						"acc_norm_stderr,none": 0.005483527064679197,
						"acc_stderr,none": 0.005483527064679197,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.133,
						"acc_norm,none": 0.133,
						"acc_norm_stderr,none": 0.01074366913239733,
						"acc_stderr,none": 0.01074366913239733,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.11,
						"acc_norm,none": 0.11,
						"acc_norm_stderr,none": 0.009899393819724437,
						"acc_stderr,none": 0.009899393819724437,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.084,
						"acc_norm,none": 0.084,
						"acc_norm_stderr,none": 0.008776162089491111,
						"acc_stderr,none": 0.008776162089491111,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.082,
						"acc_norm,none": 0.082,
						"acc_norm_stderr,none": 0.008680515615523713,
						"acc_stderr,none": 0.008680515615523713,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.0045364721513064974,
						"acc_stderr,none": 0.0045364721513064974,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.034,
						"acc_norm,none": 0.034,
						"acc_norm_stderr,none": 0.005733836139695457,
						"acc_stderr,none": 0.005733836139695457,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557425,
						"acc_stderr,none": 0.007572076091557425,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.013334797216936428,
						"acc_stderr,none": 0.013334797216936428,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.077,
						"acc_norm,none": 0.077,
						"acc_norm_stderr,none": 0.00843458014024066,
						"acc_stderr,none": 0.00843458014024066,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.012285191326386708,
						"acc_stderr,none": 0.012285191326386708,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.15166666666666667,
						"acc_norm,none": 0.15166666666666667,
						"acc_norm_stderr,none": 0.014655982094924858,
						"acc_stderr,none": 0.014655982094924858,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653897,
						"acc_stderr,none": 0.009575368801653897,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.009859828407037185,
						"acc_stderr,none": 0.009859828407037185,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557422,
						"acc_stderr,none": 0.007572076091557422,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.078,
						"acc_norm,none": 0.078,
						"acc_norm_stderr,none": 0.008484573530118583,
						"acc_stderr,none": 0.008484573530118583,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22333333333333333,
						"acc_norm,none": 0.22333333333333333,
						"acc_norm_stderr,none": 0.024085657867318574,
						"acc_stderr,none": 0.024085657867318574,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281576,
						"acc_stderr,none": 0.013354937452281576,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.046,
						"acc_norm,none": 0.046,
						"acc_norm_stderr,none": 0.006627814717380713,
						"acc_stderr,none": 0.006627814717380713,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696837,
						"acc_stderr,none": 0.010016552866696837,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.139,
						"acc_norm,none": 0.139,
						"acc_norm_stderr,none": 0.01094526376104296,
						"acc_stderr,none": 0.01094526376104296,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.153,
						"acc_norm,none": 0.153,
						"acc_norm_stderr,none": 0.011389500459665544,
						"acc_stderr,none": 0.011389500459665544,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306521,
						"acc_stderr,none": 0.004536472151306521,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4841043630782723,
						"acc_norm,none": 0.432,
						"acc_norm_stderr,none": 0.0004917354709418823,
						"acc_stderr,none": 0.04318103022327812,
						"alias": "kobest",
						"f1,none": 0.38759233902334245,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - kobest_copa",
						"f1,none": 0.4817014600409836,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.34,
						"acc_norm,none": 0.432,
						"acc_norm_stderr,none": 0.02217510926561315,
						"acc_stderr,none": 0.021206117013673066,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.337661243457994,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5944584382871536,
						"acc_stderr,none": 0.02467350455163343,
						"alias": " - kobest_sentineg",
						"f1,none": 0.59109607006455,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.4364447894430429,
						"acc_stderr,none": 0.045630131502273276,
						"alias": "lambada",
						"perplexity,none": 21.959680429510197,
						"perplexity_stderr,none": 6.584497341384194
					},
					"lambada_cloze": {
						"acc,none": 0.1342907044440132,
						"acc_stderr,none": 0.03591374323110126,
						"alias": "lambada_cloze",
						"perplexity,none": 271.50389603631925,
						"perplexity_stderr,none": 83.7389450632914
					},
					"lambada_multilingual": {
						"acc,none": 0.2034542984669125,
						"acc_stderr,none": 0.09840718188692064,
						"alias": "lambada_multilingual",
						"perplexity,none": 8998.898217246948,
						"perplexity_stderr,none": 6694.966618108296
					},
					"lambada_openai": {
						"acc,none": 0.527459732194838,
						"acc_stderr,none": 0.006955464515621099,
						"alias": " - lambada_openai",
						"perplexity,none": 8.952081666500574,
						"perplexity_stderr,none": 0.29940013941158355
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.20551135261013,
						"acc_stderr,none": 0.005629551929067118,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 105.47085282751841,
						"perplexity_stderr,none": 4.012815731462236
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.07005627789637105,
						"acc_stderr,none": 0.0035560178802326987,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 25766.206394192504,
						"perplexity_stderr,none": 2132.8849659373473
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5264894236367165,
						"acc_stderr,none": 0.006956194880237212,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 8.948070912580201,
						"perplexity_stderr,none": 0.29933800610404665
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.14671065398796818,
						"acc_stderr,none": 0.004929365951015957,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 2893.483529412879,
						"perplexity_stderr,none": 223.57318792973112
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.16320589947603337,
						"acc_stderr,none": 0.005148601801926456,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 2461.9539951590864,
						"perplexity_stderr,none": 184.56651892579842
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.11080923733747332,
						"acc_stderr,none": 0.004373181748567912,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 13863.89909655769,
						"perplexity_stderr,none": 1114.6262970177172
					},
					"lambada_standard": {
						"acc,none": 0.346206093537745,
						"acc_stderr,none": 0.006628264962716326,
						"alias": " - lambada_standard",
						"perplexity,none": 34.97243596655308,
						"perplexity_stderr,none": 1.3956248325322254
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.06307005627789637,
						"acc_stderr,none": 0.0033867040067458868,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 437.53693924512004,
						"perplexity_stderr,none": 14.950556234162482
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.3638676844783715,
						"exact_match_stderr,get-answer": 0.012138286395027864
					},
					"logiqa": {
						"acc,none": 0.23348694316436253,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971925,
						"acc_stderr,none": 0.016593362460570887,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2589058524173028,
						"acc_norm,none": 0.2926208651399491,
						"acc_norm_stderr,none": 0.011478646336639116,
						"acc_stderr,none": 0.011051456868610532,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.3001675041876047,
						"acc_norm,none": 0.2994974874371859,
						"acc_norm_stderr,none": 0.008384979997770062,
						"acc_stderr,none": 0.008390338453387805,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3478076678669773,
						"acc_stderr,none": 0.004901719120663915,
						"alias": "mc_taco",
						"f1,none": 0.5041867954911433,
						"f1_stderr,none": 0.005497157165352499
					},
					"medmcqa": {
						"acc,none": 0.3468802295003586,
						"acc_norm,none": 0.3468802295003586,
						"acc_norm_stderr,none": 0.007360277268141631,
						"acc_stderr,none": 0.007360277268141631,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.30950510604870385,
						"acc_norm,none": 0.30950510604870385,
						"acc_norm_stderr,none": 0.012961957380504976,
						"acc_stderr,none": 0.012961957380504976,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.4156103119213787,
						"acc_stderr,none": 0.09580239912197347,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4148148148148148,
						"acc_stderr,none": 0.04256193767901407,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749194,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.05021167315686781,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4679245283018868,
						"acc_stderr,none": 0.030709486992556538,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3402777777777778,
						"acc_stderr,none": 0.03962135573486219,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3872832369942196,
						"acc_stderr,none": 0.03714325906302065,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.30392156862745096,
						"acc_stderr,none": 0.04576665403207764,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.031489558297455304,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2719298245614035,
						"acc_stderr,none": 0.04185774424022057,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4689655172413793,
						"acc_stderr,none": 0.04158632762097828,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.30952380952380953,
						"acc_stderr,none": 0.023809523809523864,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848876,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.47096774193548385,
						"acc_stderr,none": 0.02839601640276099,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.32019704433497537,
						"acc_stderr,none": 0.032826493853041504,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.05024183937956912,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.5272727272727272,
						"acc_stderr,none": 0.03898531605579418,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.5151515151515151,
						"acc_stderr,none": 0.03560716516531061,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5492227979274611,
						"acc_stderr,none": 0.03590910952235525,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.41025641025641024,
						"acc_stderr,none": 0.02493931390694079,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.025928876132766118,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.4369747899159664,
						"acc_stderr,none": 0.03221943636566196,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526733,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5706422018348624,
						"acc_stderr,none": 0.0212222863972365,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25462962962962965,
						"acc_stderr,none": 0.02971127586000535,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.03509312031717982,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5569620253164557,
						"acc_stderr,none": 0.03233532777533484,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4618834080717489,
						"acc_stderr,none": 0.033460150119732274,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.5343511450381679,
						"acc_stderr,none": 0.043749285605997376,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3834218916046759,
						"acc_stderr,none": 0.09980914366380608,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.6363636363636364,
						"acc_stderr,none": 0.04391326286724071,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.04803752235190193,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.5153374233128835,
						"acc_stderr,none": 0.039265223787088445,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.39285714285714285,
						"acc_stderr,none": 0.04635550135609976,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.6116504854368932,
						"acc_stderr,none": 0.04825729337356391,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.6794871794871795,
						"acc_stderr,none": 0.03057281131029961,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.049999999999999996,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5006385696040868,
						"acc_stderr,none": 0.017879948914431676,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.5144508670520231,
						"acc_stderr,none": 0.02690784985628254,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.22681564245810057,
						"acc_stderr,none": 0.014005843570897908,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.5098039215686274,
						"acc_stderr,none": 0.02862441255016795,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.456710653363373,
						"acc_stderr,none": 0.08819785463063003,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4565916398713826,
						"acc_stderr,none": 0.0282908690541976,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.4104938271604938,
						"acc_stderr,none": 0.027371350925124768,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.28368794326241137,
						"acc_stderr,none": 0.026891709428343957,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3259452411994785,
						"acc_stderr,none": 0.011971507294982777,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3014705882352941,
						"acc_stderr,none": 0.027875982114273168,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.4019607843137255,
						"acc_stderr,none": 0.01983517648437539,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.509090909090909,
						"acc_stderr,none": 0.04788339768702861,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.46938775510204084,
						"acc_stderr,none": 0.031949171367580624,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4874878128046798,
						"acc_stderr,none": 0.08668149918781964,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.6716417910447762,
						"acc_stderr,none": 0.033206858897443244,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.35299714557564227,
						"acc_stderr,none": 0.07260160225767888,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.41566265060240964,
						"acc_stderr,none": 0.03836722176598052,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4152046783625731,
						"acc_stderr,none": 0.037792759455032,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.5106469689251146,
						"acc_stderr,none": 0.005046014495000337,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5247152156224573,
						"acc_stderr,none": 0.005036628707512172,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.4215686274509804,
						"acc_stderr,none": 0.024477263169831672,
						"alias": "mrpc",
						"f1,none": 0.3621621621621622,
						"f1_stderr,none": 0.03205846794892418
					},
					"multimedqa": {
						"acc,none": 0.36990773598296667,
						"acc_norm,none": 0.33304231926291905,
						"acc_norm_stderr,none": 0.00013082869092342827,
						"acc_stderr,none": 0.08369341742374357,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.4573019801980198,
						"acc_stderr,none": 0.007155568599175842,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6803047420208933,
						"mrr_stderr,none": 0.01038155751006059,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42776523702031605,
						"r@2_stderr,none": 0.016630994786546345
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6494544787546851,
						"mrr_stderr,none": 0.010585965554227134,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4492099322799097,
						"r@2_stderr,none": 0.016720377939562166
					},
					"openbookqa": {
						"acc,none": 0.38,
						"acc_norm,none": 0.48,
						"acc_norm_stderr,none": 0.022365160424231336,
						"acc_stderr,none": 0.02172888143870171,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4425,
						"acc_stderr,none": 0.01110894141174761,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.425,
						"acc_stderr,none": 0.011056609982818337,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4505,
						"acc_stderr,none": 0.011128198119942876,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5195,
						"acc_stderr,none": 0.011174628009718154,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5495,
						"acc_stderr,none": 0.01112819811994288,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456286,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4465,
						"acc_stderr,none": 0.01111893386729012,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48364285714285715,
						"acc_stderr,none": 0.03742491672433065,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7622415669205659,
						"acc_norm,none": 0.7595212187159956,
						"acc_norm_stderr,none": 0.009971345364651073,
						"acc_stderr,none": 0.009932525779525485,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.3142613151152861,
						"acc_norm,none": 0.34345644748078563,
						"acc_norm_stderr,none": 0.003469291231279199,
						"acc_stderr,none": 0.0033915477676620506,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.678,
						"acc_stderr,none": 0.02091666833001988,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7112002692685127,
						"acc_norm,none": 0.6498530476108814,
						"acc_norm_stderr,none": 0.0035303023483130268,
						"acc_stderr,none": 0.14901000015478819,
						"alias": "pythia",
						"bits_per_byte,none": 0.9100384399401059,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.8790955653906523,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 8.952081666500574,
						"perplexity_stderr,none": 0.29940013941158355,
						"word_perplexity,none": 29.169437028524897,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.39184397163120566,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.05314683784813257,
						"acc_stderr,none": 0.03643101455378746,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.0451938453788867,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.44375,
						"acc_norm_stderr,none": 0.039400853796259426,
						"acc_stderr,none": 0.038518021388670956,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3873239436619718,
						"acc_norm,none": 0.3485915492957746,
						"acc_norm_stderr,none": 0.028326433924036696,
						"acc_stderr,none": 0.028957389575950964,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5367014460918909,
						"acc_stderr,none": 0.006747159971243198,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4799159040316597,
						"acc_stderr,none": 0.002484693696428313,
						"alias": "qqp",
						"f1,none": 0.3913861472112073,
						"f1_stderr,none": 0.003325824910140327
					},
					"race": {
						"acc,none": 0.37799043062200954,
						"acc_stderr,none": 0.015006820447473675,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.029660066290893485,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.933,
						"acc_norm,none": 0.916,
						"acc_norm_stderr,none": 0.008776162089491087,
						"acc_stderr,none": 0.007910345983177547,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.029660066290893485,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8589449541284404,
						"acc_stderr,none": 0.01179418408824394,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.508297510746776,
						"acc_norm,none": 0.6681995401379586,
						"acc_norm_stderr,none": 0.0033290651940302528,
						"acc_stderr,none": 0.0035346052406485464,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5724934278393398,
						"acc_stderr,none": 0.043529050370286135,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5098157051282052,
						"acc_stderr,none": 0.005003291032836252,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6810580723624202,
						"acc_stderr,none": 0.00469220874106159,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5288235294117647,
						"acc_stderr,none": 0.004942747062078341,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.33768559231342077,
						"acc_stderr,none": 0.0014856093902889046,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.01640398946990783,
						"bleu_diff,none": -4.6115065977579235,
						"bleu_diff_stderr,none": 0.8901793950229382,
						"bleu_max,none": 28.188061252436995,
						"bleu_max_stderr,none": 0.7905622059974436,
						"rouge1_acc,none": 0.33047735618115054,
						"rouge1_acc_stderr,none": 0.016466769613698303,
						"rouge1_diff,none": -4.875337395009971,
						"rouge1_diff_stderr,none": 1.0314030062890773,
						"rouge1_max,none": 53.54443904556872,
						"rouge1_max_stderr,none": 0.8354753910839761,
						"rouge2_acc,none": 0.2876376988984088,
						"rouge2_acc_stderr,none": 0.015846315101394805,
						"rouge2_diff,none": -6.262800072222912,
						"rouge2_diff_stderr,none": 1.2035260865490716,
						"rouge2_max,none": 38.764281938826,
						"rouge2_max_stderr,none": 0.9852926715679251,
						"rougeL_acc,none": 0.31946144430844553,
						"rougeL_acc_stderr,none": 0.0163226441829605,
						"rougeL_diff,none": -5.1087067052813175,
						"rougeL_diff_stderr,none": 1.0442388598358738,
						"rougeL_max,none": 50.952837351048444,
						"rougeL_max_stderr,none": 0.862823336910652
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.32558139534883723,
						"bleu_acc_stderr,none": 0.01640398946990783,
						"bleu_diff,none": -4.6115065977579235,
						"bleu_diff_stderr,none": 0.8901793950229382,
						"bleu_max,none": 28.188061252436995,
						"bleu_max_stderr,none": 0.7905622059974436,
						"rouge1_acc,none": 0.33047735618115054,
						"rouge1_acc_stderr,none": 0.016466769613698303,
						"rouge1_diff,none": -4.875337395009971,
						"rouge1_diff_stderr,none": 1.0314030062890773,
						"rouge1_max,none": 53.54443904556872,
						"rouge1_max_stderr,none": 0.8354753910839761,
						"rouge2_acc,none": 0.2876376988984088,
						"rouge2_acc_stderr,none": 0.015846315101394805,
						"rouge2_diff,none": -6.262800072222912,
						"rouge2_diff_stderr,none": 1.2035260865490716,
						"rouge2_max,none": 38.764281938826,
						"rouge2_max_stderr,none": 0.9852926715679251,
						"rougeL_acc,none": 0.31946144430844553,
						"rougeL_acc_stderr,none": 0.0163226441829605,
						"rougeL_diff,none": -5.1087067052813175,
						"rougeL_diff_stderr,none": 1.0442388598358738,
						"rougeL_max,none": 50.952837351048444,
						"rougeL_max_stderr,none": 0.862823336910652
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2668298653610771,
						"acc_stderr,none": 0.015483691939237272,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40854131926576437,
						"acc_stderr,none": 0.014830756408738311,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0172244094488189,
						"exact_match_stderr,none": 0.0028869840818920704
					},
					"wic": {
						"acc,none": 0.4780564263322884,
						"acc_stderr,none": 0.019791633564310455,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.9100384399401059,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.8790955653906523,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 29.169437028524897,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7308602999210734,
						"acc_stderr,none": 0.012464911951268736,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5865384615384616,
						"acc_stderr,none": 0.04852294969729053,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7655677655677655,
						"acc_stderr,none": 0.02568715645908419,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5216363636363637,
						"acc_stderr,none": 0.02619795891999274,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914156,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207867,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196213,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.02227087748536044,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843424,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920787,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3557429718875502,
						"acc_stderr,none": 0.0470955966598085,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757715,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.00944605100135822,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.348995983935743,
						"acc_stderr,none": 0.009554095988300667,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757722,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5481927710843374,
						"acc_stderr,none": 0.009975410845717828,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.357429718875502,
						"acc_stderr,none": 0.009606013646043455,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3642570281124498,
						"acc_stderr,none": 0.009645667910246849,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3530120481927711,
						"acc_stderr,none": 0.00957922584070971,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206102,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3236947791164659,
						"acc_stderr,none": 0.009378357180373087,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996465,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568397,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.336144578313253,
						"acc_stderr,none": 0.009468634669293529,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3305220883534137,
						"acc_stderr,none": 0.009428789109289817,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5175982191203898,
						"acc_stderr,none": 0.07780934553349826,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47650562541363334,
						"acc_stderr,none": 0.012852912530051752,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7809397749834547,
						"acc_stderr,none": 0.010643931294349715,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5168762409000662,
						"acc_stderr,none": 0.012859793919977606,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5036399735274653,
						"acc_stderr,none": 0.01286678434828923,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.47187293183322304,
						"acc_stderr,none": 0.012846749995797692,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4751819986763733,
						"acc_stderr,none": 0.012851264962354846,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4818001323626737,
						"acc_stderr,none": 0.012858598401831846,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4784910655195235,
						"acc_stderr,none": 0.012855214257296594,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4930509596293845,
						"acc_stderr,none": 0.01286588257096072,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5175380542686963,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.49768365320979485,
						"acc_stderr,none": 0.012866987239478047,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6792537648909868,
						"acc_stderr,none": 0.0899142305270184,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8318279569892473,
						"acc_stderr,none": 0.007758461234229165,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.4939759036144578,
						"acc_stderr,none": 0.055211755360913765,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5005213764337852,
						"acc_stderr,none": 0.016154257961240533,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.55893536121673,
						"acc_stderr,none": 0.030674766664426298,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5365079365079365,
						"acc_stderr,none": 0.028141315964997575,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.498015873015873,
						"acc_stderr,none": 0.022293722571246784,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "microsoft/phi-1_5"
	},
	"microsoft/phi-2": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.709695603156708,
						"acc_norm,none": 0.6992671927846674,
						"acc_norm_stderr,none": 0.03945279560284417,
						"acc_stderr,none": 0.043663093078136095,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3984375,
						"acc_stderr,none": 0.016760418340298115,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.27095,
						"acc_stderr,none": 0.22106792764880567,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8034179104477611,
						"acc_stderr,none": 0.15225190829812563,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29569093610698366,
						"acc_norm,none": 0.29569093610698366,
						"acc_norm_stderr,none": 0.1206499388250582,
						"acc_stderr,none": 0.1206499388250582,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.28811949576929713,
						"acc_norm,none": 0.28811949576929713,
						"acc_norm_stderr,none": 0.04771127590627858,
						"acc_stderr,none": 0.04771127590627858,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.181839169244647,
						"likelihood_diff_stderr,none": 0.5607906008510215,
						"pct_stereotype,none": 0.5682766845557544,
						"pct_stereotype_stderr,none": 0.09236396956361448
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.034940944881889764,
						"exact_match_stderr,none": 0.004074640578111424
					},
					"glue": {
						"acc,none": 0.5272280365583858,
						"acc_stderr,none": 0.036256377458803654,
						"alias": "glue",
						"f1,none": 0.4514060837807571,
						"f1_stderr,none": 0.0010924081558753881,
						"mcc,none": 0.14309681607209304,
						"mcc_stderr,none": 0.0009276166769477274
					},
					"kmmlu": {
						"acc,none": 0.179151025122726,
						"acc_norm,none": 0.179151025122726,
						"acc_norm_stderr,none": 0.038678746658561663,
						"acc_stderr,none": 0.038678746658561663,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.47138785354089013,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.0004894268537074177,
						"acc_stderr,none": 0.040579888948371924,
						"alias": "kobest",
						"f1,none": 0.36424351286352064,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5460896565107705,
						"acc_stderr,none": 0.03275215227644765,
						"alias": "lambada",
						"perplexity,none": 9.543702595750869,
						"perplexity_stderr,none": 1.8325936371002682
					},
					"lambada_cloze": {
						"acc,none": 0.0723850184358626,
						"acc_stderr,none": 0.01820903337801098,
						"alias": "lambada_cloze",
						"perplexity,none": 366.66627229948585,
						"perplexity_stderr,none": 106.94063108331319
					},
					"lambada_multilingual": {
						"acc,none": 0.2908208810401708,
						"acc_stderr,none": 0.11431811937529752,
						"alias": "lambada_multilingual",
						"perplexity,none": 612.5333247891786,
						"perplexity_stderr,none": 317.2840073227033
					},
					"mmlu": {
						"acc,none": 0.5435835351089588,
						"acc_stderr,none": 0.12867933183083602,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.48862911795961744,
						"acc_stderr,none": 0.15315524389906154,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6021886063727068,
						"acc_stderr,none": 0.08697609602962406,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.64478388040299,
						"acc_stderr,none": 0.0950214421255835,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.46907706945765937,
						"acc_stderr,none": 0.10934553669332531,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.4360539389638041,
						"acc_norm,none": 0.3866237546936221,
						"acc_norm_stderr,none": 0.00010396317114414426,
						"acc_stderr,none": 0.08521544293717373,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.47664285714285715,
						"acc_stderr,none": 0.040151566652334436,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7476669195373873,
						"acc_norm,none": 0.7007782389502576,
						"acc_norm_stderr,none": 0.0033484835597671313,
						"acc_stderr,none": 0.149535192480841,
						"alias": "pythia",
						"bits_per_byte,none": 0.6511297505023197,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5703974665687999,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.936726119786097,
						"perplexity_stderr,none": 0.1612343224527864,
						"word_perplexity,none": 11.17253144821841,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4397163120567376,
						"acc_norm,none": 0.4734042553191489,
						"acc_norm_stderr,none": 0.06316389564033567,
						"acc_stderr,none": 0.04430979896518065,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7743169944427806,
						"acc_stderr,none": 0.08541932751995714,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.39610199905344434,
						"acc_stderr,none": 0.04520086403288278,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.37821297429620565,
						"bleu_acc_stderr,none": 0.0002881959808458651,
						"bleu_diff,none": -2.2779872656744,
						"bleu_diff_stderr,none": 0.8549006596403927,
						"bleu_max,none": 30.46153324391041,
						"bleu_max_stderr,none": 0.6771264546254333,
						"rouge1_acc,none": 0.38310893512851896,
						"rouge1_acc_stderr,none": 0.0002896280379328611,
						"rouge1_diff,none": -2.2806195346786753,
						"rouge1_diff_stderr,none": 1.2053901795631718,
						"rouge1_max,none": 56.19075110872051,
						"rouge1_max_stderr,none": 0.7074586044524152,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.00028059139051979436,
						"rouge2_diff,none": -3.1369550564443758,
						"rouge2_diff_stderr,none": 1.6280864816942957,
						"rouge2_max,none": 41.80472763325914,
						"rouge2_max_stderr,none": 1.0270015419774554,
						"rougeL_acc,none": 0.3708690330477356,
						"rougeL_acc_stderr,none": 0.00028593773697790806,
						"rougeL_diff,none": -2.2530349744358436,
						"rougeL_diff_stderr,none": 1.250153037652559,
						"rougeL_max,none": 53.416953676625305,
						"rougeL_max_stderr,none": 0.7497627896676777
					},
					"xcopa": {
						"acc,none": 0.5121818181818181,
						"acc_stderr,none": 0.02974222764408025,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3601338688085676,
						"acc_stderr,none": 0.04586954745748627,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5279465736116961,
						"acc_stderr,none": 0.07960746269436779,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.698583951449764,
						"acc_stderr,none": 0.09689504243701293,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.709695603156708,
						"acc_norm,none": 0.6992671927846674,
						"acc_norm_stderr,none": 0.03945279560284417,
						"acc_stderr,none": 0.043663093078136095,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3984375,
						"acc_stderr,none": 0.016760418340298115,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.015605111967541947,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.393,
						"acc_stderr,none": 0.015452824654081496,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.38666666666666666,
						"acc_stderr,none": 0.014063941778353479,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.5273037542662116,
						"acc_norm,none": 0.5358361774744027,
						"acc_norm_stderr,none": 0.014573813664735718,
						"acc_stderr,none": 0.014589589101985989,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7996632996632996,
						"acc_norm,none": 0.7798821548821548,
						"acc_norm_stderr,none": 0.008501788774716787,
						"acc_stderr,none": 0.008213003984949967,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.27095,
						"acc_stderr,none": 0.22106792764880567,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.263,
						"acc_stderr,none": 0.009847029094655511,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.01012814344511474,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.5035,
						"acc_stderr,none": 0.011182862030875634,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.00931014712127108,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.1485,
						"acc_stderr,none": 0.0079533328077842,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.264,
						"acc_stderr,none": 0.009859036479299192,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0065,
						"acc_stderr,none": 0.0017973564602277762,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.033,
						"acc_stderr,none": 0.003995432609977368,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430694884,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.03427331887201735,
						"acc_stderr,none": 0.0037902159819584434,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8034179104477611,
						"acc_stderr,none": 0.15225190829812563,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408058,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099832,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307804,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787724,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750636,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.551,
						"acc_stderr,none": 0.015736792768752023,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341674,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.004089954489689072,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410038,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270415,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033839,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919294,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847167,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074789,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426129,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541663,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.015186527932040119,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.014414290540008218,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746832,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946104,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378235,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698453,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.423,
						"acc_stderr,none": 0.015630589090476345,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938017,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.647,
						"acc_stderr,none": 0.01512017260548369,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661754,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099179,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248114,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348633,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541643,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.409,
						"acc_stderr,none": 0.015555094373257944,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.015740004693383852,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304408,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.015816135752773196,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621219,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525037,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702287,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000106,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987286,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812187,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.014046255632633916,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.0157161699532041,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756993,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140911,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.687,
						"acc_stderr,none": 0.014671272822977888,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.407,
						"acc_stderr,none": 0.015543249100255545,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746847,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696844,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408049,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.756,
						"acc_stderr,none": 0.013588548437881416,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.01281855355784399,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240627,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665549,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756967,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246446,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.422,
						"acc_stderr,none": 0.01562562511262067,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.393,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.8324159021406727,
						"acc_stderr,none": 0.0065324956127469445,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.0569293902400011,
						"alias": "cb",
						"f1,none": 0.6140350877192983,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29569093610698366,
						"acc_norm,none": 0.29569093610698366,
						"acc_norm_stderr,none": 0.1206499388250582,
						"acc_stderr,none": 0.1206499388250582,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828229,
						"acc_stderr,none": 0.05589005688828229,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122594,
						"acc_stderr,none": 0.08503766788122594,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3404255319148936,
						"acc_norm,none": 0.3404255319148936,
						"acc_norm_stderr,none": 0.06986570800554746,
						"acc_stderr,none": 0.06986570800554746,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.40540540540540543,
						"acc_norm,none": 0.40540540540540543,
						"acc_norm_stderr,none": 0.08182838794858087,
						"acc_stderr,none": 0.08182838794858087,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.10942433098048311,
						"acc_stderr,none": 0.10942433098048311,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764435,
						"acc_stderr,none": 0.09361833424764435,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.07655305550699534,
						"acc_stderr,none": 0.07655305550699534,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.05050762722761052,
						"acc_stderr,none": 0.05050762722761052,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.07102933373079212,
						"acc_stderr,none": 0.07102933373079212,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.06712194885164875,
						"acc_stderr,none": 0.06712194885164875,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.43478260869565216,
						"acc_norm,none": 0.43478260869565216,
						"acc_norm_stderr,none": 0.10568965974008647,
						"acc_stderr,none": 0.10568965974008647,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.28811949576929713,
						"acc_norm,none": 0.28811949576929713,
						"acc_norm_stderr,none": 0.04771127590627858,
						"acc_stderr,none": 0.04771127590627858,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.037698374558241474,
						"acc_stderr,none": 0.037698374558241474,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2804878048780488,
						"acc_norm,none": 0.2804878048780488,
						"acc_norm_stderr,none": 0.03518700228801578,
						"acc_stderr,none": 0.03518700228801578,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.0348937065201876,
						"acc_stderr,none": 0.0348937065201876,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.03401506715249039,
						"acc_stderr,none": 0.03401506715249039,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.03223012819451555,
						"acc_stderr,none": 0.03223012819451555,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018762,
						"acc_stderr,none": 0.03489370652018762,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2824427480916031,
						"acc_norm,none": 0.2824427480916031,
						"acc_norm_stderr,none": 0.03948406125768361,
						"acc_stderr,none": 0.03948406125768361,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.036507817107892686,
						"acc_stderr,none": 0.036507817107892686,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2848297213622291,
						"acc_norm,none": 0.2848297213622291,
						"acc_norm_stderr,none": 0.02515182168617951,
						"acc_stderr,none": 0.02515182168617951,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.03114557065948678,
						"acc_stderr,none": 0.03114557065948678,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2681564245810056,
						"acc_norm,none": 0.2681564245810056,
						"acc_norm_stderr,none": 0.03320421630673714,
						"acc_stderr,none": 0.03320421630673714,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2616033755274262,
						"acc_norm,none": 0.2616033755274262,
						"acc_norm_stderr,none": 0.028609516716994934,
						"acc_stderr,none": 0.028609516716994934,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.04480127092110671,
						"acc_stderr,none": 0.04480127092110671,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.036028141763926436,
						"acc_stderr,none": 0.036028141763926436,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.330188679245283,
						"acc_norm,none": 0.330188679245283,
						"acc_norm_stderr,none": 0.045894715469579954,
						"acc_stderr,none": 0.045894715469579954,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.02659853762760148,
						"acc_stderr,none": 0.02659853762760148,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.03198001660115071,
						"acc_stderr,none": 0.03198001660115071,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2573099415204678,
						"acc_norm,none": 0.2573099415204678,
						"acc_norm_stderr,none": 0.03352799844161865,
						"acc_stderr,none": 0.03352799844161865,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3129251700680272,
						"acc_norm,none": 0.3129251700680272,
						"acc_norm_stderr,none": 0.03837477482026868,
						"acc_stderr,none": 0.03837477482026868,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735707,
						"acc_stderr,none": 0.03881956126735707,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.036522158784075054,
						"acc_stderr,none": 0.036522158784075054,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3128834355828221,
						"acc_norm,none": 0.3128834355828221,
						"acc_norm_stderr,none": 0.036429145782924055,
						"acc_stderr,none": 0.036429145782924055,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.3313953488372093,
						"acc_norm,none": 0.3313953488372093,
						"acc_norm_stderr,none": 0.03599646438179593,
						"acc_stderr,none": 0.03599646438179593,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.02938135465203213,
						"acc_stderr,none": 0.02938135465203213,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.29292929292929293,
						"acc_norm,none": 0.29292929292929293,
						"acc_norm_stderr,none": 0.032424979581788166,
						"acc_stderr,none": 0.032424979581788166,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.35714285714285715,
						"acc_norm,none": 0.35714285714285715,
						"acc_norm_stderr,none": 0.031124619309328177,
						"acc_stderr,none": 0.031124619309328177,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.27391304347826084,
						"acc_norm,none": 0.27391304347826084,
						"acc_norm_stderr,none": 0.029470189815005897,
						"acc_stderr,none": 0.029470189815005897,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.32592592592592595,
						"acc_norm,none": 0.32592592592592595,
						"acc_norm_stderr,none": 0.040491220417025055,
						"acc_stderr,none": 0.040491220417025055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.04036845779880778,
						"acc_stderr,none": 0.04036845779880778,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.30113636363636365,
						"acc_norm,none": 0.30113636363636365,
						"acc_norm_stderr,none": 0.03467837977202437,
						"acc_stderr,none": 0.03467837977202437,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2684563758389262,
						"acc_norm,none": 0.2684563758389262,
						"acc_norm_stderr,none": 0.036427227538629016,
						"acc_stderr,none": 0.036427227538629016,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.033071627503231754,
						"acc_stderr,none": 0.033071627503231754,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.0357179155646827,
						"acc_stderr,none": 0.0357179155646827,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173459,
						"acc_stderr,none": 0.03445000289173459,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2818181818181818,
						"acc_norm,none": 0.2818181818181818,
						"acc_norm_stderr,none": 0.043091187099464585,
						"acc_stderr,none": 0.043091187099464585,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.03941888501263192,
						"acc_stderr,none": 0.03941888501263192,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3412698412698413,
						"acc_norm,none": 0.3412698412698413,
						"acc_norm_stderr,none": 0.04240799327574924,
						"acc_stderr,none": 0.04240799327574924,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.033330686633366996,
						"acc_stderr,none": 0.033330686633366996,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.033611014038904936,
						"acc_stderr,none": 0.033611014038904936,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.30170316301703165,
						"acc_norm,none": 0.30170316301703165,
						"acc_norm_stderr,none": 0.022668252455186565,
						"acc_stderr,none": 0.022668252455186565,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3317757009345794,
						"acc_norm,none": 0.3317757009345794,
						"acc_norm_stderr,none": 0.03226217317322115,
						"acc_stderr,none": 0.03226217317322115,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3821138211382114,
						"acc_norm,none": 0.3821138211382114,
						"acc_norm_stderr,none": 0.043991695270045095,
						"acc_stderr,none": 0.043991695270045095,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.0420996926731014,
						"acc_stderr,none": 0.0420996926731014,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.34444444444444444,
						"acc_norm,none": 0.34444444444444444,
						"acc_norm_stderr,none": 0.03551712696743982,
						"acc_stderr,none": 0.03551712696743982,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3439153439153439,
						"acc_norm,none": 0.3439153439153439,
						"acc_norm_stderr,none": 0.0346439012574329,
						"acc_stderr,none": 0.0346439012574329,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.043140913253187876,
						"acc_stderr,none": 0.043140913253187876,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.03695183311650232,
						"acc_stderr,none": 0.03695183311650232,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.29714285714285715,
						"acc_norm,none": 0.29714285714285715,
						"acc_norm_stderr,none": 0.034645078898843704,
						"acc_stderr,none": 0.034645078898843704,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2938388625592417,
						"acc_norm,none": 0.2938388625592417,
						"acc_norm_stderr,none": 0.03143379932562227,
						"acc_stderr,none": 0.03143379932562227,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.0224394125827864,
						"acc_stderr,none": 0.0224394125827864,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.29310344827586204,
						"acc_norm,none": 0.29310344827586204,
						"acc_norm_stderr,none": 0.02994900549662091,
						"acc_stderr,none": 0.02994900549662091,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.28735632183908044,
						"acc_norm,none": 0.28735632183908044,
						"acc_norm_stderr,none": 0.03440515707228721,
						"acc_stderr,none": 0.03440515707228721,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26991150442477874,
						"acc_norm,none": 0.26991150442477874,
						"acc_norm_stderr,none": 0.029594239995417413,
						"acc_stderr,none": 0.029594239995417413,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.03579526516456225,
						"acc_stderr,none": 0.03579526516456225,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.03637652289278585,
						"acc_stderr,none": 0.03637652289278585,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.14309681607209304,
						"mcc_stderr,none": 0.030456800175785496
					},
					"copa": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.025643239997624283,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.181839169244647,
						"likelihood_diff_stderr,none": 0.5607906008510215,
						"pct_stereotype,none": 0.5682766845557544,
						"pct_stereotype_stderr,none": 0.09236396956361448
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.974235616557713,
						"likelihood_diff_stderr,none": 0.09726762891127427,
						"pct_stereotype,none": 0.6457960644007156,
						"pct_stereotype_stderr,none": 0.011682542807413805
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.053104568313766,
						"likelihood_diff_stderr,none": 0.4255125073291548,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.04705213398778438
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.811228145252574,
						"likelihood_diff_stderr,none": 2.116987235096053,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.792199208186223,
						"likelihood_diff_stderr,none": 0.6661931873667893,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.016516315937042,
						"likelihood_diff_stderr,none": 0.2174929586710006,
						"pct_stereotype,none": 0.61875,
						"pct_stereotype_stderr,none": 0.02719363040277548
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.816477236924348,
						"likelihood_diff_stderr,none": 0.24770533285790727,
						"pct_stereotype,none": 0.5925925925925926,
						"pct_stereotype_stderr,none": 0.033509916046960436
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.255572160085042,
						"likelihood_diff_stderr,none": 0.41402657772582907,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.7150638309989388,
						"likelihood_diff_stderr,none": 0.1627624749580204,
						"pct_stereotype,none": 0.5551181102362205,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7339531322857282,
						"likelihood_diff_stderr,none": 0.37747395195124633,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.04188770861432398
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.5008001840242775,
						"likelihood_diff_stderr,none": 0.48919070986852764,
						"pct_stereotype,none": 0.8817204301075269,
						"pct_stereotype_stderr,none": 0.03366870454347983
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.629115526299728,
						"likelihood_diff_stderr,none": 0.26925262514093556,
						"pct_stereotype,none": 0.7,
						"pct_stereotype_stderr,none": 0.03333333333333337
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.394580919542125,
						"likelihood_diff_stderr,none": 0.10336634492127922,
						"pct_stereotype,none": 0.4895646988670245,
						"pct_stereotype_stderr,none": 0.012210638982043403
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.900425042046441,
						"likelihood_diff_stderr,none": 0.3679156388346811,
						"pct_stereotype,none": 0.43333333333333335,
						"pct_stereotype_stderr,none": 0.05252667118728807
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.013113755446214,
						"likelihood_diff_stderr,none": 1.10711764048032,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 6.540024439493815,
						"likelihood_diff_stderr,none": 0.7011803725248559,
						"pct_stereotype,none": 0.5606060606060606,
						"pct_stereotype_stderr,none": 0.06156009014560979
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.928975732155678,
						"likelihood_diff_stderr,none": 0.20651118481984726,
						"pct_stereotype,none": 0.5358255451713395,
						"pct_stereotype_stderr,none": 0.027879009258377083
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 5.092127396655177,
						"likelihood_diff_stderr,none": 0.26595649912933217,
						"pct_stereotype,none": 0.2727272727272727,
						"pct_stereotype_stderr,none": 0.02805515453856212
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 5.116797341240777,
						"likelihood_diff_stderr,none": 0.7027054775737525,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.05913268547421811
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.0648530877154805,
						"likelihood_diff_stderr,none": 0.19786194135109744,
						"pct_stereotype,none": 0.47391304347826085,
						"pct_stereotype_stderr,none": 0.0233062153668594
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.8962842194930367,
						"likelihood_diff_stderr,none": 0.3246859127018441,
						"pct_stereotype,none": 0.5391304347826087,
						"pct_stereotype_stderr,none": 0.04668566114758416
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.168544266250107,
						"likelihood_diff_stderr,none": 0.36910801449021596,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.048650425541051985
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.612948583096874,
						"likelihood_diff_stderr,none": 0.3104578130453755,
						"pct_stereotype,none": 0.6224489795918368,
						"pct_stereotype_stderr,none": 0.03471541794449721
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.034940944881889764,
						"exact_match_stderr,none": 0.004074640578111424
					},
					"glue": {
						"acc,none": 0.5272280365583858,
						"acc_stderr,none": 0.036256377458803654,
						"alias": "glue",
						"f1,none": 0.4514060837807571,
						"f1_stderr,none": 0.0010924081558753881,
						"mcc,none": 0.14309681607209304,
						"mcc_stderr,none": 0.0009276166769477274
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.5807429871114481,
						"exact_match_stderr,get-answer": 0.013591720959042115
					},
					"hellaswag": {
						"acc,none": 0.5577574188408684,
						"acc_norm,none": 0.7360087631945827,
						"acc_norm_stderr,none": 0.004398937225038426,
						"acc_stderr,none": 0.004956378590571543,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.179151025122726,
						"acc_norm,none": 0.179151025122726,
						"acc_norm_stderr,none": 0.038678746658561663,
						"acc_stderr,none": 0.038678746658561663,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.04020151261036843,
						"acc_stderr,none": 0.04020151261036843,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.161,
						"acc_norm,none": 0.161,
						"acc_norm_stderr,none": 0.01162816469672718,
						"acc_stderr,none": 0.01162816469672718,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.149,
						"acc_norm,none": 0.149,
						"acc_norm_stderr,none": 0.011266140684632176,
						"acc_stderr,none": 0.011266140684632176,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763912,
						"acc_stderr,none": 0.013253174964763912,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.252,
						"acc_norm,none": 0.252,
						"acc_norm_stderr,none": 0.013736254390651155,
						"acc_stderr,none": 0.013736254390651155,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.25333333333333335,
						"acc_norm,none": 0.25333333333333335,
						"acc_norm_stderr,none": 0.017770356455067436,
						"acc_stderr,none": 0.017770356455067436,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.141,
						"acc_norm,none": 0.141,
						"acc_norm_stderr,none": 0.011010914595992441,
						"acc_stderr,none": 0.011010914595992441,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.115,
						"acc_norm,none": 0.115,
						"acc_norm_stderr,none": 0.01009340759490461,
						"acc_stderr,none": 0.01009340759490461,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.147,
						"acc_norm,none": 0.147,
						"acc_norm_stderr,none": 0.01120341539516033,
						"acc_stderr,none": 0.01120341539516033,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.029832025555495235,
						"acc_stderr,none": 0.029832025555495235,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.102,
						"acc_norm,none": 0.102,
						"acc_norm_stderr,none": 0.009575368801653883,
						"acc_stderr,none": 0.009575368801653883,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768079,
						"acc_stderr,none": 0.04408440022768079,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938026,
						"acc_stderr,none": 0.012931481864938026,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.142,
						"acc_norm,none": 0.142,
						"acc_norm_stderr,none": 0.011043457699378216,
						"acc_stderr,none": 0.011043457699378216,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696256,
						"acc_stderr,none": 0.013127502859696256,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.166,
						"acc_norm,none": 0.166,
						"acc_norm_stderr,none": 0.01177211037081219,
						"acc_stderr,none": 0.01177211037081219,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.169,
						"acc_norm,none": 0.169,
						"acc_norm_stderr,none": 0.011856625977890115,
						"acc_stderr,none": 0.011856625977890115,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.169,
						"acc_norm,none": 0.169,
						"acc_norm_stderr,none": 0.01185662597789012,
						"acc_stderr,none": 0.01185662597789012,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.173,
						"acc_norm,none": 0.173,
						"acc_norm_stderr,none": 0.011967214137559962,
						"acc_stderr,none": 0.011967214137559962,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.201,
						"acc_norm,none": 0.201,
						"acc_norm_stderr,none": 0.012679107214617326,
						"acc_stderr,none": 0.012679107214617326,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.01067487484483795,
						"acc_stderr,none": 0.01067487484483795,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475287,
						"acc_stderr,none": 0.011358918303475287,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.126,
						"acc_norm,none": 0.126,
						"acc_norm_stderr,none": 0.010499249222408054,
						"acc_stderr,none": 0.010499249222408054,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220072,
						"acc_stderr,none": 0.013374972519220072,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.161,
						"acc_norm,none": 0.161,
						"acc_norm_stderr,none": 0.011628164696727176,
						"acc_stderr,none": 0.011628164696727176,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.217,
						"acc_norm,none": 0.217,
						"acc_norm_stderr,none": 0.01304151375727071,
						"acc_stderr,none": 0.01304151375727071,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.22333333333333333,
						"acc_norm,none": 0.22333333333333333,
						"acc_norm_stderr,none": 0.017016909765167516,
						"acc_stderr,none": 0.017016909765167516,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.142,
						"acc_norm,none": 0.142,
						"acc_norm_stderr,none": 0.011043457699378235,
						"acc_stderr,none": 0.011043457699378235,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.184,
						"acc_norm,none": 0.184,
						"acc_norm_stderr,none": 0.012259457340938579,
						"acc_stderr,none": 0.012259457340938579,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.184,
						"acc_norm,none": 0.184,
						"acc_norm_stderr,none": 0.01225945734093859,
						"acc_stderr,none": 0.01225945734093859,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.010845350230472988,
						"acc_stderr,none": 0.010845350230472988,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.0446196043338474,
						"acc_stderr,none": 0.0446196043338474,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.21333333333333335,
						"acc_norm,none": 0.21333333333333335,
						"acc_norm_stderr,none": 0.02369131349654082,
						"acc_stderr,none": 0.02369131349654082,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177451,
						"acc_stderr,none": 0.013569640199177451,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.169,
						"acc_norm,none": 0.169,
						"acc_norm_stderr,none": 0.011856625977890115,
						"acc_stderr,none": 0.011856625977890115,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319338,
						"acc_stderr,none": 0.012535235623319338,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.02662790314934043,
						"acc_stderr,none": 0.02662790314934043,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.189,
						"acc_norm,none": 0.189,
						"acc_norm_stderr,none": 0.01238678458811771,
						"acc_stderr,none": 0.01238678458811771,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.01306317904059529,
						"acc_stderr,none": 0.01306317904059529,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264594,
						"acc_stderr,none": 0.011912216456264594,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.47138785354089013,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.0004894268537074177,
						"acc_stderr,none": 0.040579888948371924,
						"alias": "kobest",
						"f1,none": 0.36424351286352064,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.0157926694516289,
						"alias": " - kobest_copa",
						"f1,none": 0.47019416532546,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.332,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.021081766571222856,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3294163436886228,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.48614609571788414,
						"acc_stderr,none": 0.0251162986508672,
						"alias": " - kobest_sentineg",
						"f1,none": 0.36221886221886224,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5460896565107705,
						"acc_stderr,none": 0.03275215227644765,
						"alias": "lambada",
						"perplexity,none": 9.543702595750869,
						"perplexity_stderr,none": 1.8325936371002682
					},
					"lambada_cloze": {
						"acc,none": 0.0723850184358626,
						"acc_stderr,none": 0.01820903337801098,
						"alias": "lambada_cloze",
						"perplexity,none": 366.66627229948585,
						"perplexity_stderr,none": 106.94063108331319
					},
					"lambada_multilingual": {
						"acc,none": 0.2908208810401708,
						"acc_stderr,none": 0.11431811937529752,
						"alias": "lambada_multilingual",
						"perplexity,none": 612.5333247891786,
						"perplexity_stderr,none": 317.2840073227033
					},
					"lambada_openai": {
						"acc,none": 0.6116825150397827,
						"acc_stderr,none": 0.006789981313755397,
						"alias": " - lambada_openai",
						"perplexity,none": 5.936726119786097,
						"perplexity_stderr,none": 0.1612343224527864
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.10809237337473317,
						"acc_stderr,none": 0.004325830894250449,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 154.69916766446076,
						"perplexity_stderr,none": 5.643105592313111
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.1560256161459344,
						"acc_stderr,none": 0.005055622739428333,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 1219.8403248909792,
						"perplexity_stderr,none": 84.52281577320403
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6097418979235397,
						"acc_stderr,none": 0.006796120271549717,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.933068057999128,
						"perplexity_stderr,none": 0.16089511667244943
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.21696099359596352,
						"acc_stderr,none": 0.005742415346929946,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 507.71120565942937,
						"perplexity_stderr,none": 34.29537603520904
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.2672229769066563,
						"acc_stderr,none": 0.006165025727477476,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 307.1339806394436,
						"perplexity_stderr,none": 20.06748155588759
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.20415292062875995,
						"acc_stderr,none": 0.005615710162255017,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 1022.0480446980415,
						"perplexity_stderr,none": 73.90082135694692
					},
					"lambada_standard": {
						"acc,none": 0.4820492916747526,
						"acc_stderr,none": 0.006961486944579351,
						"alias": " - lambada_standard",
						"perplexity,none": 13.152366947175787,
						"perplexity_stderr,none": 0.4231240456437626
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.036677663496992044,
						"acc_stderr,none": 0.0026187782113317767,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 578.6333769345109,
						"perplexity_stderr,none": 19.328350144477657
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.4243002544529262,
						"exact_match_stderr,get-answer": 0.012469429161169032
					},
					"logiqa": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.30414746543778803,
						"acc_norm_stderr,none": 0.01804446579150677,
						"acc_stderr,none": 0.01716289475512707,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2856234096692112,
						"acc_norm,none": 0.29961832061068705,
						"acc_norm_stderr,none": 0.011557488735539873,
						"acc_stderr,none": 0.011396524130843133,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.3102177554438861,
						"acc_norm,none": 0.3075376884422111,
						"acc_norm_stderr,none": 0.008447881903537013,
						"acc_stderr,none": 0.008468176898858482,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3877356492268587,
						"acc_stderr,none": 0.005014508296822146,
						"alias": "mc_taco",
						"f1,none": 0.5163557265958337,
						"f1_stderr,none": 0.005573551184115024
					},
					"medmcqa": {
						"acc,none": 0.38369591202486253,
						"acc_norm,none": 0.38369591202486253,
						"acc_norm_stderr,none": 0.007519675437152951,
						"acc_stderr,none": 0.007519675437152951,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3857030636292223,
						"acc_norm,none": 0.3857030636292223,
						"acc_norm_stderr,none": 0.013648098974225574,
						"acc_stderr,none": 0.013648098974225574,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.5435835351089588,
						"acc_stderr,none": 0.12867933183083602,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4740740740740741,
						"acc_stderr,none": 0.04313531696750575,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.5592105263157895,
						"acc_stderr,none": 0.04040311062490436,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6075471698113207,
						"acc_stderr,none": 0.03005258057955784,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6041666666666666,
						"acc_stderr,none": 0.04089465449325582,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.44,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145632,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5549132947976878,
						"acc_stderr,none": 0.03789401760283647,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.3137254901960784,
						"acc_stderr,none": 0.04617034827006718,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.0479372485441102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.48936170212765956,
						"acc_stderr,none": 0.03267862331014063,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.30701754385964913,
						"acc_stderr,none": 0.043391383225798594,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5448275862068965,
						"acc_stderr,none": 0.04149886942192118,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.025107425481137292,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.6870967741935484,
						"acc_stderr,none": 0.02637756702864586,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.4729064039408867,
						"acc_stderr,none": 0.03512819077876106,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.04725815626252609,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.703030303030303,
						"acc_stderr,none": 0.03567969772268048,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7121212121212122,
						"acc_stderr,none": 0.03225883512300992,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.7772020725388601,
						"acc_stderr,none": 0.03003114797764154,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5769230769230769,
						"acc_stderr,none": 0.02504919787604234,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.02794045713622841,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.5966386554621849,
						"acc_stderr,none": 0.031866081214088314,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.33774834437086093,
						"acc_stderr,none": 0.03861557546255169,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7541284403669725,
						"acc_stderr,none": 0.01846194096870845,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.4305555555555556,
						"acc_stderr,none": 0.03376922151252335,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.6617647058823529,
						"acc_stderr,none": 0.03320574612945431,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7552742616033755,
						"acc_stderr,none": 0.02798569938703641,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6322869955156951,
						"acc_stderr,none": 0.03236198350928276,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6641221374045801,
						"acc_stderr,none": 0.04142313771996663,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.48862911795961744,
						"acc_stderr,none": 0.15315524389906154,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.7603305785123967,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7314814814814815,
						"acc_stderr,none": 0.042844679680521934,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.7423312883435583,
						"acc_stderr,none": 0.03436150827846917,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.49107142857142855,
						"acc_stderr,none": 0.04745033255489123,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.7378640776699029,
						"acc_stderr,none": 0.043546310772605956,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8076923076923077,
						"acc_stderr,none": 0.02581923325648375,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.6909323116219668,
						"acc_stderr,none": 0.016524988919702183,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.661849710982659,
						"acc_stderr,none": 0.02546977014940017,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2324022346368715,
						"acc_stderr,none": 0.014125968754673385,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.6111111111111112,
						"acc_stderr,none": 0.027914055510468008,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6021886063727068,
						"acc_stderr,none": 0.08697609602962406,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.5659163987138264,
						"acc_stderr,none": 0.0281502322445356,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.5987654320987654,
						"acc_stderr,none": 0.027272582849839803,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.41843971631205673,
						"acc_stderr,none": 0.029427994039419994,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.39765319426336376,
						"acc_stderr,none": 0.012499840347460645,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.4485294117647059,
						"acc_stderr,none": 0.030211479609121603,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5408496732026143,
						"acc_stderr,none": 0.020160213617222516,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6181818181818182,
						"acc_stderr,none": 0.046534298079135075,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.673469387755102,
						"acc_stderr,none": 0.030021056238440313,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.64478388040299,
						"acc_stderr,none": 0.0950214421255835,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.7761194029850746,
						"acc_stderr,none": 0.02947525023601719,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.46907706945765937,
						"acc_stderr,none": 0.10934553669332531,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.4819277108433735,
						"acc_stderr,none": 0.03889951252827216,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.695906432748538,
						"acc_stderr,none": 0.035282112582452306,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.5361181864493123,
						"acc_stderr,none": 0.005033973398909419,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5482099267697315,
						"acc_stderr,none": 0.005019297547981701,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.678921568627451,
						"acc_stderr,none": 0.023142920563024697,
						"alias": "mrpc",
						"f1,none": 0.808199121522694,
						"f1_stderr,none": 0.01642777543700678
					},
					"multimedqa": {
						"acc,none": 0.4360539389638041,
						"acc_norm,none": 0.3866237546936221,
						"acc_norm_stderr,none": 0.00010396317114414426,
						"acc_stderr,none": 0.08521544293717373,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5577557755775577,
						"acc_stderr,none": 0.007133729098987128,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6815274659182633,
						"mrr_stderr,none": 0.010383499901557633,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4187358916478555,
						"r@2_stderr,none": 0.01658384431636118
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.655850264880781,
						"mrr_stderr,none": 0.010431260893956715,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.46613995485327314,
						"r@2_stderr,none": 0.016768732584115823
					},
					"openbookqa": {
						"acc,none": 0.39,
						"acc_norm,none": 0.51,
						"acc_norm_stderr,none": 0.02237859698923078,
						"acc_stderr,none": 0.021834685869369208,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4515,
						"acc_stderr,none": 0.011130400617630761,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.01095719079029897,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4505,
						"acc_stderr,none": 0.01112819811994288,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4885,
						"acc_stderr,none": 0.011180177690296085,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5645,
						"acc_stderr,none": 0.01108969637469111,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.535,
						"acc_stderr,none": 0.011155703691943106,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4465,
						"acc_stderr,none": 0.01111893386729012,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.47664285714285715,
						"acc_stderr,none": 0.040151566652334436,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7905331882480957,
						"acc_norm,none": 0.7878128400435256,
						"acc_norm_stderr,none": 0.009539299828174086,
						"acc_stderr,none": 0.009494302979819798,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.4384607173356106,
						"acc_norm,none": 0.4040883859948762,
						"acc_norm_stderr,none": 0.0035851081419657287,
						"acc_stderr,none": 0.0036251715893172135,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747612,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7476669195373873,
						"acc_norm,none": 0.7007782389502576,
						"acc_norm_stderr,none": 0.0033484835597671313,
						"acc_stderr,none": 0.149535192480841,
						"alias": "pythia",
						"bits_per_byte,none": 0.6511297505023197,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5703974665687999,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.936726119786097,
						"perplexity_stderr,none": 0.1612343224527864,
						"word_perplexity,none": 11.17253144821841,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4397163120567376,
						"acc_norm,none": 0.4734042553191489,
						"acc_norm_stderr,none": 0.06316389564033567,
						"acc_stderr,none": 0.04430979896518065,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5916666666666667,
						"acc_norm_stderr,none": 0.045058059858031296,
						"acc_stderr,none": 0.04583492485141056,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.45,
						"acc_norm,none": 0.51875,
						"acc_norm_stderr,none": 0.03962468875738331,
						"acc_stderr,none": 0.03945381823835187,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4084507042253521,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064893,
						"acc_stderr,none": 0.029219452741745366,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5293794618341571,
						"acc_stderr,none": 0.006753721287612185,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5113034875092753,
						"acc_stderr,none": 0.002486065104821426,
						"alias": "qqp",
						"f1,none": 0.44840871021775547,
						"f1_stderr,none": 0.0032698635196570605
					},
					"race": {
						"acc,none": 0.3799043062200957,
						"acc_stderr,none": 0.015021600804935652,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217444,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.948,
						"acc_norm,none": 0.929,
						"acc_norm_stderr,none": 0.00812557844248792,
						"acc_stderr,none": 0.0070246242138171456,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217444,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8715596330275229,
						"acc_stderr,none": 0.011336793735355337,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5309407177846646,
						"acc_norm,none": 0.7148355493351994,
						"acc_norm_stderr,none": 0.003192136729976297,
						"acc_stderr,none": 0.003528317069625774,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7743169944427806,
						"acc_stderr,none": 0.08541932751995714,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.842948717948718,
						"acc_stderr,none": 0.003641587793882798,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.913550217898044,
						"acc_stderr,none": 0.00282929093879479,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5724509803921569,
						"acc_stderr,none": 0.00489872785581899,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.39610199905344434,
						"acc_stderr,none": 0.04520086403288278,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.37821297429620565,
						"bleu_acc_stderr,none": 0.0002881959808458651,
						"bleu_diff,none": -2.2779872656744,
						"bleu_diff_stderr,none": 0.8549006596403927,
						"bleu_max,none": 30.46153324391041,
						"bleu_max_stderr,none": 0.6771264546254333,
						"rouge1_acc,none": 0.38310893512851896,
						"rouge1_acc_stderr,none": 0.0002896280379328611,
						"rouge1_diff,none": -2.2806195346786753,
						"rouge1_diff_stderr,none": 1.2053901795631718,
						"rouge1_max,none": 56.19075110872051,
						"rouge1_max_stderr,none": 0.7074586044524152,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.00028059139051979436,
						"rouge2_diff,none": -3.1369550564443758,
						"rouge2_diff_stderr,none": 1.6280864816942957,
						"rouge2_max,none": 41.80472763325914,
						"rouge2_max_stderr,none": 1.0270015419774554,
						"rougeL_acc,none": 0.3708690330477356,
						"rougeL_acc_stderr,none": 0.00028593773697790806,
						"rougeL_diff,none": -2.2530349744358436,
						"rougeL_diff_stderr,none": 1.250153037652559,
						"rougeL_max,none": 53.416953676625305,
						"rougeL_max_stderr,none": 0.7497627896676777
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.37821297429620565,
						"bleu_acc_stderr,none": 0.016976335907546866,
						"bleu_diff,none": -2.2779872656744,
						"bleu_diff_stderr,none": 0.9246083817705704,
						"bleu_max,none": 30.46153324391041,
						"bleu_max_stderr,none": 0.8228769377163473,
						"rouge1_acc,none": 0.38310893512851896,
						"rouge1_acc_stderr,none": 0.01701846167938986,
						"rouge1_diff,none": -2.2806195346786753,
						"rouge1_diff_stderr,none": 1.0979026275417925,
						"rouge1_max,none": 56.19075110872051,
						"rouge1_max_stderr,none": 0.841105584604225,
						"rouge2_acc,none": 0.35495716034271724,
						"rouge2_acc_stderr,none": 0.0167508623813759,
						"rouge2_diff,none": -3.1369550564443758,
						"rouge2_diff_stderr,none": 1.2759649218118403,
						"rouge2_max,none": 41.80472763325914,
						"rouge2_max_stderr,none": 1.013410845598889,
						"rougeL_acc,none": 0.3708690330477356,
						"rougeL_acc_stderr,none": 0.016909693580248818,
						"rougeL_diff,none": -2.2530349744358436,
						"rougeL_diff_stderr,none": 1.1181024271740756,
						"rougeL_max,none": 53.416953676625305,
						"rougeL_max_stderr,none": 0.8658884395045806
					},
					"truthfulqa_mc1": {
						"acc,none": 0.30599755201958384,
						"acc_stderr,none": 0.016132229728155034,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4411542225703746,
						"acc_stderr,none": 0.015077476755719722,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.034940944881889764,
						"exact_match_stderr,none": 0.004074640578111424
					},
					"wic": {
						"acc,none": 0.49843260188087773,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6511297505023197,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5703974665687999,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.17253144821841,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7537490134175217,
						"acc_stderr,none": 0.01210836530743753,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.059755502635482904,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.049230010729780505,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7875457875457875,
						"acc_stderr,none": 0.02480196713503143,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5121818181818181,
						"acc_stderr,none": 0.02974222764408025,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.02238235778196214,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.022361396739207888,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.022365160424231333,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.508,
						"acc_stderr,none": 0.02238020883492804,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3601338688085676,
						"acc_stderr,none": 0.04586954745748627,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956485,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3413654618473896,
						"acc_stderr,none": 0.009504288078880218,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.36626506024096384,
						"acc_stderr,none": 0.00965693088601476,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3248995983935743,
						"acc_stderr,none": 0.009387421581685764,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5277108433734939,
						"acc_stderr,none": 0.010006669313970314,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.38714859437751004,
						"acc_stderr,none": 0.009763465328590645,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.41767068273092367,
						"acc_stderr,none": 0.00988527772784016,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.009434574056101966,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495741,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.009493454925438252,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.0094769768497786,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3321285140562249,
						"acc_stderr,none": 0.009440328001240637,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667053,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3397590361445783,
						"acc_stderr,none": 0.00949345492543824,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5279465736116961,
						"acc_stderr,none": 0.07960746269436779,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4751819986763733,
						"acc_stderr,none": 0.012851264962354843,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7875579086697552,
						"acc_stderr,none": 0.010526234937585395,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5823957643944407,
						"acc_stderr,none": 0.012691211382848639,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.499669093315685,
						"acc_stderr,none": 0.012867122498493415,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.012866108021218212,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4923891462607545,
						"acc_stderr,none": 0.012865634571114483,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.012847698270388229,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4923891462607545,
						"acc_stderr,none": 0.012865634571114483,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335884,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5095962938451357,
						"acc_stderr,none": 0.012864755260408957,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.012862387586650077,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.698583951449764,
						"acc_stderr,none": 0.09689504243701293,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8550537634408603,
						"acc_stderr,none": 0.007302677492920836,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5542168674698795,
						"acc_stderr,none": 0.05489019318889363,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5099061522419187,
						"acc_stderr,none": 0.01615109593635894,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.5399239543726235,
						"acc_stderr,none": 0.030791472862142368,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5333333333333333,
						"acc_stderr,none": 0.02815385894564889,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5456349206349206,
						"acc_stderr,none": 0.022200847780753458,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "microsoft/phi-2"
	},
	"mistralai/Mistral-7B-Instruct-v0.2": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.7237880496054115,
						"acc_norm,none": 0.6992671927846674,
						"acc_norm_stderr,none": 0.06575368411217784,
						"acc_stderr,none": 0.0870545951521031,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.470625,
						"acc_stderr,none": 0.016323170842139138,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.86535,
						"acc_stderr,none": 0.1044357382014318,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8340597014925373,
						"acc_stderr,none": 0.13897696485795538,
						"alias": "blimp"
					},
					"ceval-valid": {
						"acc,none": 0.43684992570579495,
						"acc_norm,none": 0.43684992570579495,
						"acc_norm_stderr,none": 0.14261591689704498,
						"acc_stderr,none": 0.14261591689704498,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.42220687273355206,
						"acc_norm,none": 0.42220687273355206,
						"acc_norm_stderr,none": 0.10478605809778388,
						"acc_stderr,none": 0.10478605809778388,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.708786246175203,
						"likelihood_diff_stderr,none": 0.4977051425278663,
						"pct_stereotype,none": 0.586463923673226,
						"pct_stereotype_stderr,none": 0.08249358792815063
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.061515748031496065,
						"exact_match_stderr,none": 0.005331527918306684
					},
					"glue": {
						"acc,none": 0.5822142909581908,
						"acc_stderr,none": 0.032790502714334625,
						"alias": "glue",
						"f1,none": 0.4411327873267142,
						"f1_stderr,none": 0.0012831556331226797,
						"mcc,none": 0.1275893167139847,
						"mcc_stderr,none": 0.0010517992291010554
					},
					"kmmlu": {
						"acc,none": 0.34704013860814326,
						"acc_norm,none": 0.34704013860814326,
						"acc_norm_stderr,none": 0.07238936581650249,
						"acc_stderr,none": 0.07238936581650249,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.6033764525323394,
						"acc_norm,none": 0.496,
						"acc_norm_stderr,none": 0.0005009699398797607,
						"acc_stderr,none": 0.10436362536496387,
						"alias": "kobest",
						"f1,none": 0.5553821902279571,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6832912866291481,
						"acc_stderr,none": 0.01690798528858505,
						"alias": "lambada",
						"perplexity,none": 3.786399904636776,
						"perplexity_stderr,none": 0.21254011171654186
					},
					"lambada_cloze": {
						"acc,none": 0.2092955559868038,
						"acc_stderr,none": 0.048106457496816746,
						"alias": "lambada_cloze",
						"perplexity,none": 79.69768925561641,
						"perplexity_stderr,none": 21.41458436331952
					},
					"mmlu": {
						"acc,none": 0.5901580971371599,
						"acc_stderr,none": 0.1308916942503901,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5385759829968119,
						"acc_stderr,none": 0.14701376176218026,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.664628258770518,
						"acc_stderr,none": 0.1089782488947092,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6889827754306143,
						"acc_stderr,none": 0.08980026474895134,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.497304154773232,
						"acc_stderr,none": 0.10643423341212979,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.5175301632363378,
						"acc_norm,none": 0.4778006153543328,
						"acc_norm_stderr,none": 0.0001606261257056986,
						"acc_stderr,none": 0.0694061118891545,
						"alias": "stem"
					},
					"qa4mre": {
						"acc,none": 0.5460992907801419,
						"acc_norm,none": 0.5815602836879432,
						"acc_norm_stderr,none": 0.07866017109546872,
						"acc_stderr,none": 0.06008175299623157,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.8909853249475891,
						"acc_stderr,none": 0.06497187134187173,
						"alias": "sycophancy"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.7237880496054115,
						"acc_norm,none": 0.6992671927846674,
						"acc_norm_stderr,none": 0.06575368411217784,
						"acc_stderr,none": 0.0870545951521031,
						"alias": "ai2_arc"
					},
					"anli": {
						"acc,none": 0.470625,
						"acc_stderr,none": 0.016323170842139138,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.015809045699406728,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.015768596914394382,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.01441375952760986,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.5401023890784983,
						"acc_norm,none": 0.5614334470989761,
						"acc_norm_stderr,none": 0.014500682618212867,
						"acc_stderr,none": 0.014564318856924848,
						"alias": " - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.8143939393939394,
						"acc_norm,none": 0.7672558922558923,
						"acc_norm_stderr,none": 0.008671169120579301,
						"acc_stderr,none": 0.007977770454202346,
						"alias": " - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.86535,
						"acc_stderr,none": 0.1044357382014318,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.010742308811391417,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.9875,
						"acc_stderr,none": 0.002484947178762673,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.5685,
						"acc_stderr,none": 0.011077690761900849,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0017272787111155127,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.9675,
						"acc_stderr,none": 0.003966073608738821,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.004382876316119542,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.005997998665721458,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.006237543865716644,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.8705,
						"acc_stderr,none": 0.007509532045059017,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.8235,
						"acc_stderr,none": 0.008527029383968144,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8340597014925373,
						"acc_stderr,none": 0.13897696485795538,
						"alias": "blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996688,
						"alias": " - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998383,
						"alias": " - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448812,
						"alias": " - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843983,
						"alias": " - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651514,
						"alias": " - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696232,
						"alias": " - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.614,
						"acc_stderr,none": 0.015402637476784376,
						"alias": " - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319325,
						"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.00982000165134572,
						"alias": " - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503001,
						"alias": " - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.0049395748196984675,
						"alias": " - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": " - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.959,
						"acc_stderr,none": 0.006273624021118755,
						"alias": " - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.00669595667816304,
						"alias": " - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400241,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291603,
						"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033844,
						"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": " - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.01174363286691615,
						"alias": " - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.821,
						"acc_stderr,none": 0.012128730605719092,
						"alias": " - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747377,
						"alias": " - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866435,
						"alias": " - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.01182860583145427,
						"alias": " - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565786,
						"alias": " - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.015766025737882165,
						"alias": " - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": " - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.01182860583145426,
						"alias": " - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.0146326386586329,
						"alias": " - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341681,
						"alias": " - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832009,
						"alias": " - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787726,
						"alias": " - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244068,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139973,
						"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.015594460144140601,
						"alias": " - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240656,
						"alias": " - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.595,
						"acc_stderr,none": 0.015531136990453042,
						"alias": " - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771304,
						"alias": " - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.014746404865473487,
						"alias": " - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792925,
						"alias": " - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.014078856992462621,
						"alias": " - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139724,
						"alias": " - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462123,
						"alias": " - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613621,
						"alias": " - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": " - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140911,
						"alias": " - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045074,
						"alias": " - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659988,
						"alias": " - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.702,
						"acc_stderr,none": 0.0144708467411347,
						"alias": " - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.449,
						"acc_stderr,none": 0.01573679276875202,
						"alias": " - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.00696042006257141,
						"alias": " - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315109,
						"alias": " - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298163,
						"alias": " - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.785,
						"acc_stderr,none": 0.012997843819031808,
						"alias": " - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.015794475789511476,
						"alias": " - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103327,
						"alias": " - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.01312750285969624,
						"alias": " - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.657,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340988,
						"alias": " - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946087,
						"alias": " - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786543,
						"alias": " - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352803,
						"alias": " - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333337,
						"alias": " - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": " - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410054,
						"alias": " - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099197,
						"alias": " - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.394,
						"acc_stderr,none": 0.01545972195749338,
						"alias": " - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.8538226299694189,
						"acc_stderr,none": 0.006178975060597746,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.0569293902400011,
						"alias": "cb",
						"f1,none": 0.6534278959810874,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.43684992570579495,
						"acc_norm,none": 0.43684992570579495,
						"acc_norm_stderr,none": 0.14261591689704498,
						"acc_stderr,none": 0.14261591689704498,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.3877551020408163,
						"acc_norm,none": 0.3877551020408163,
						"acc_norm_stderr,none": 0.07032677934739909,
						"acc_stderr,none": 0.07032677934739909,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3939393939393939,
						"acc_norm,none": 0.3939393939393939,
						"acc_norm_stderr,none": 0.08637692614387409,
						"acc_stderr,none": 0.08637692614387409,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3404255319148936,
						"acc_norm,none": 0.3404255319148936,
						"acc_norm_stderr,none": 0.06986570800554745,
						"acc_stderr,none": 0.06986570800554745,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.41818181818181815,
						"acc_norm,none": 0.41818181818181815,
						"acc_norm_stderr,none": 0.0671242332357016,
						"acc_stderr,none": 0.0671242332357016,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.5675675675675675,
						"acc_norm,none": 0.5675675675675675,
						"acc_norm_stderr,none": 0.08256893144064577,
						"acc_stderr,none": 0.08256893144064577,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.4594594594594595,
						"acc_norm,none": 0.4594594594594595,
						"acc_norm_stderr,none": 0.08305895907471073,
						"acc_stderr,none": 0.08305895907471073,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.45161290322580644,
						"acc_norm,none": 0.45161290322580644,
						"acc_norm_stderr,none": 0.09085862440549508,
						"acc_stderr,none": 0.09085862440549508,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.65,
						"acc_norm,none": 0.65,
						"acc_norm_stderr,none": 0.10942433098048311,
						"acc_stderr,none": 0.10942433098048311,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.6842105263157895,
						"acc_norm,none": 0.6842105263157895,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.6956521739130435,
						"acc_norm,none": 0.6956521739130435,
						"acc_norm_stderr,none": 0.09810018692482893,
						"acc_stderr,none": 0.09810018692482893,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.1042572070285374,
						"acc_stderr,none": 0.1042572070285374,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622948,
						"acc_stderr,none": 0.10389457216622948,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.15075567228888181,
						"acc_stderr,none": 0.15075567228888181,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.10729033533674225,
						"acc_stderr,none": 0.10729033533674225,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.5714285714285714,
						"acc_norm,none": 0.5714285714285714,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.10910894511799618,
						"acc_stderr,none": 0.10910894511799618,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3673469387755102,
						"acc_norm,none": 0.3673469387755102,
						"acc_norm_stderr,none": 0.06958255967849925,
						"acc_stderr,none": 0.06958255967849925,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5909090909090909,
						"acc_norm,none": 0.5909090909090909,
						"acc_norm_stderr,none": 0.07497837474124878,
						"acc_stderr,none": 0.07497837474124878,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.5652173913043478,
						"acc_norm,none": 0.5652173913043478,
						"acc_norm_stderr,none": 0.07389883353033021,
						"acc_stderr,none": 0.07389883353033021,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.42220687273355206,
						"acc_norm,none": 0.42220687273355206,
						"acc_norm_stderr,none": 0.10478605809778388,
						"acc_stderr,none": 0.10478605809778388,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.3905325443786982,
						"acc_norm,none": 0.3905325443786982,
						"acc_norm_stderr,none": 0.03763996705629265,
						"acc_stderr,none": 0.03763996705629265,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.36875,
						"acc_norm,none": 0.36875,
						"acc_norm_stderr,none": 0.03826204233503226,
						"acc_stderr,none": 0.03826204233503226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.03756335775187897,
						"acc_stderr,none": 0.03756335775187897,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.49760765550239233,
						"acc_norm,none": 0.49760765550239233,
						"acc_norm_stderr,none": 0.03466836542150577,
						"acc_stderr,none": 0.03466836542150577,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.03782614981812041,
						"acc_stderr,none": 0.03782614981812041,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.6335877862595419,
						"acc_norm,none": 0.6335877862595419,
						"acc_norm_stderr,none": 0.04225875451969636,
						"acc_stderr,none": 0.04225875451969636,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3161764705882353,
						"acc_norm,none": 0.3161764705882353,
						"acc_norm_stderr,none": 0.040019338846834944,
						"acc_stderr,none": 0.040019338846834944,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.42990654205607476,
						"acc_norm,none": 0.42990654205607476,
						"acc_norm_stderr,none": 0.048084723494299535,
						"acc_stderr,none": 0.048084723494299535,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.4674922600619195,
						"acc_norm,none": 0.4674922600619195,
						"acc_norm_stderr,none": 0.027804957713129835,
						"acc_stderr,none": 0.027804957713129835,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3627450980392157,
						"acc_norm,none": 0.3627450980392157,
						"acc_norm_stderr,none": 0.033744993563193555,
						"acc_stderr,none": 0.033744993563193555,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.5195530726256983,
						"acc_norm,none": 0.5195530726256983,
						"acc_norm_stderr,none": 0.037447917191364796,
						"acc_stderr,none": 0.037447917191364796,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0306858205966108,
						"acc_stderr,none": 0.0306858205966108,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.3018867924528302,
						"acc_norm,none": 0.3018867924528302,
						"acc_norm_stderr,none": 0.044801270921106716,
						"acc_stderr,none": 0.044801270921106716,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4485981308411215,
						"acc_norm,none": 0.4485981308411215,
						"acc_norm_stderr,none": 0.04830698295619321,
						"acc_stderr,none": 0.04830698295619321,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.4716981132075472,
						"acc_norm,none": 0.4716981132075472,
						"acc_norm_stderr,none": 0.04871677165040775,
						"acc_stderr,none": 0.04871677165040775,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.28703703703703703,
						"acc_norm,none": 0.28703703703703703,
						"acc_norm_stderr,none": 0.043733130409147614,
						"acc_stderr,none": 0.043733130409147614,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919942,
						"acc_stderr,none": 0.04336290903919942,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.46226415094339623,
						"acc_norm,none": 0.46226415094339623,
						"acc_norm_stderr,none": 0.04865583757821749,
						"acc_stderr,none": 0.04865583757821749,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.37362637362637363,
						"acc_norm,none": 0.37362637362637363,
						"acc_norm_stderr,none": 0.02933263256052554,
						"acc_stderr,none": 0.02933263256052554,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.5147058823529411,
						"acc_norm,none": 0.5147058823529411,
						"acc_norm_stderr,none": 0.03507793834791324,
						"acc_stderr,none": 0.03507793834791324,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.6023391812865497,
						"acc_norm,none": 0.6023391812865497,
						"acc_norm_stderr,none": 0.0375363895576169,
						"acc_stderr,none": 0.0375363895576169,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.41496598639455784,
						"acc_norm,none": 0.41496598639455784,
						"acc_norm_stderr,none": 0.040777479727739804,
						"acc_stderr,none": 0.040777479727739804,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.381294964028777,
						"acc_norm,none": 0.381294964028777,
						"acc_norm_stderr,none": 0.041345934945119074,
						"acc_stderr,none": 0.041345934945119074,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.5031446540880503,
						"acc_norm,none": 0.5031446540880503,
						"acc_norm_stderr,none": 0.03977707748639468,
						"acc_stderr,none": 0.03977707748639468,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.4785276073619632,
						"acc_norm,none": 0.4785276073619632,
						"acc_norm_stderr,none": 0.0392474687675113,
						"acc_stderr,none": 0.0392474687675113,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.43023255813953487,
						"acc_norm,none": 0.43023255813953487,
						"acc_norm_stderr,none": 0.03786189925946143,
						"acc_stderr,none": 0.03786189925946143,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.027751792418790923,
						"acc_stderr,none": 0.027751792418790923,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.40404040404040403,
						"acc_norm,none": 0.40404040404040403,
						"acc_norm_stderr,none": 0.034961309720561266,
						"acc_stderr,none": 0.034961309720561266,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6764705882352942,
						"acc_norm,none": 0.6764705882352942,
						"acc_norm_stderr,none": 0.030388353551886793,
						"acc_stderr,none": 0.030388353551886793,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.028361099300075063,
						"acc_stderr,none": 0.028361099300075063,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.34814814814814815,
						"acc_norm,none": 0.34814814814814815,
						"acc_norm_stderr,none": 0.041153246103369526,
						"acc_stderr,none": 0.041153246103369526,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.4125874125874126,
						"acc_norm,none": 0.4125874125874126,
						"acc_norm_stderr,none": 0.04131287692392344,
						"acc_stderr,none": 0.04131287692392344,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.036363636363636376,
						"acc_stderr,none": 0.036363636363636376,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.42953020134228187,
						"acc_norm,none": 0.42953020134228187,
						"acc_norm_stderr,none": 0.04068949724015223,
						"acc_stderr,none": 0.04068949724015223,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.39644970414201186,
						"acc_norm,none": 0.39644970414201186,
						"acc_norm_stderr,none": 0.03773949997679294,
						"acc_stderr,none": 0.03773949997679294,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.04141487016241484,
						"acc_stderr,none": 0.04141487016241484,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.423728813559322,
						"acc_norm,none": 0.423728813559322,
						"acc_norm_stderr,none": 0.04568404181144862,
						"acc_stderr,none": 0.04568404181144862,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665339,
						"acc_stderr,none": 0.03334645408665339,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.047093069786618966,
						"acc_stderr,none": 0.047093069786618966,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.4195804195804196,
						"acc_norm,none": 0.4195804195804196,
						"acc_norm_stderr,none": 0.041412787292137106,
						"acc_stderr,none": 0.041412787292137106,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.04472135954999579,
						"acc_stderr,none": 0.04472135954999579,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.3945945945945946,
						"acc_norm,none": 0.3945945945945946,
						"acc_norm_stderr,none": 0.0360321188626959,
						"acc_stderr,none": 0.0360321188626959,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.5058139534883721,
						"acc_norm,none": 0.5058139534883721,
						"acc_norm_stderr,none": 0.03823337064994852,
						"acc_stderr,none": 0.03823337064994852,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.44525547445255476,
						"acc_norm,none": 0.44525547445255476,
						"acc_norm_stderr,none": 0.02454478420191345,
						"acc_stderr,none": 0.02454478420191345,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.7336448598130841,
						"acc_norm,none": 0.7336448598130841,
						"acc_norm_stderr,none": 0.030288912386133213,
						"acc_stderr,none": 0.030288912386133213,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.4715447154471545,
						"acc_norm,none": 0.4715447154471545,
						"acc_norm_stderr,none": 0.04519450648295478,
						"acc_stderr,none": 0.04519450648295478,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.4426229508196721,
						"acc_norm,none": 0.4426229508196721,
						"acc_norm_stderr,none": 0.04515426947106744,
						"acc_stderr,none": 0.04515426947106744,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.5285714285714286,
						"acc_norm,none": 0.5285714285714286,
						"acc_norm_stderr,none": 0.03452921053595503,
						"acc_stderr,none": 0.03452921053595503,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5611111111111111,
						"acc_norm,none": 0.5611111111111111,
						"acc_norm_stderr,none": 0.0370915696198558,
						"acc_stderr,none": 0.0370915696198558,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.4708994708994709,
						"acc_norm,none": 0.4708994708994709,
						"acc_norm_stderr,none": 0.036404433270336836,
						"acc_stderr,none": 0.036404433270336836,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.04376552980994349,
						"acc_stderr,none": 0.04376552980994349,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3724137931034483,
						"acc_norm,none": 0.3724137931034483,
						"acc_norm_stderr,none": 0.04028731532947558,
						"acc_stderr,none": 0.04028731532947558,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.4380952380952381,
						"acc_norm,none": 0.4380952380952381,
						"acc_norm_stderr,none": 0.048651804501824956,
						"acc_stderr,none": 0.048651804501824956,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.46285714285714286,
						"acc_norm,none": 0.46285714285714286,
						"acc_norm_stderr,none": 0.03780017090541436,
						"acc_stderr,none": 0.03780017090541436,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.3127962085308057,
						"acc_norm,none": 0.3127962085308057,
						"acc_norm_stderr,none": 0.031993655655275954,
						"acc_stderr,none": 0.031993655655275954,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.28191489361702127,
						"acc_norm,none": 0.28191489361702127,
						"acc_norm_stderr,none": 0.023234393263661224,
						"acc_stderr,none": 0.023234393263661224,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.43103448275862066,
						"acc_norm,none": 0.43103448275862066,
						"acc_norm_stderr,none": 0.03258314422493334,
						"acc_stderr,none": 0.03258314422493334,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.45977011494252873,
						"acc_norm,none": 0.45977011494252873,
						"acc_norm_stderr,none": 0.03789104827773084,
						"acc_stderr,none": 0.03789104827773084,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.45925925925925926,
						"acc_norm,none": 0.45925925925925926,
						"acc_norm_stderr,none": 0.04304979692464243,
						"acc_stderr,none": 0.04304979692464243,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.4911504424778761,
						"acc_norm,none": 0.4911504424778761,
						"acc_norm_stderr,none": 0.03332811194650095,
						"acc_stderr,none": 0.03332811194650095,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.46060606060606063,
						"acc_norm,none": 0.46060606060606063,
						"acc_norm_stderr,none": 0.03892207016552013,
						"acc_stderr,none": 0.03892207016552013,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.03575339609546739,
						"acc_stderr,none": 0.03575339609546739,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.48520710059171596,
						"acc_norm,none": 0.48520710059171596,
						"acc_norm_stderr,none": 0.03855895070315001,
						"acc_stderr,none": 0.03855895070315001,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.4968944099378882,
						"acc_norm,none": 0.4968944099378882,
						"acc_norm_stderr,none": 0.039527708265086496,
						"acc_stderr,none": 0.039527708265086496,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.03945381823835186,
						"acc_stderr,none": 0.03945381823835186,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.1275893167139847,
						"mcc_stderr,none": 0.03243145431677487
					},
					"copa": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.027265992434429086,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.708786246175203,
						"likelihood_diff_stderr,none": 0.4977051425278663,
						"pct_stereotype,none": 0.586463923673226,
						"pct_stereotype_stderr,none": 0.08249358792815063
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.7318490926325145,
						"likelihood_diff_stderr,none": 0.11342322831385568,
						"pct_stereotype,none": 0.654144305307096,
						"pct_stereotype_stderr,none": 0.011618424517571955
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.537944248744419,
						"likelihood_diff_stderr,none": 0.4369966519695147,
						"pct_stereotype,none": 0.6703296703296703,
						"pct_stereotype_stderr,none": 0.04955219508596587
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.3852669108997695,
						"likelihood_diff_stderr,none": 3.1266330530899724,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 7.576054441011869,
						"likelihood_diff_stderr,none": 0.8834566291940967,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 4.158607739210129,
						"likelihood_diff_stderr,none": 0.21464841768933213,
						"pct_stereotype,none": 0.553125,
						"pct_stereotype_stderr,none": 0.027836160509246814
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.096699096538402,
						"likelihood_diff_stderr,none": 0.3010726414449652,
						"pct_stereotype,none": 0.6342592592592593,
						"pct_stereotype_stderr,none": 0.032847388576472056
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.959399302800496,
						"likelihood_diff_stderr,none": 0.5163626886218743,
						"pct_stereotype,none": 0.6944444444444444,
						"pct_stereotype_stderr,none": 0.05466818705978919
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.81732547189307,
						"likelihood_diff_stderr,none": 0.2159233883820022,
						"pct_stereotype,none": 0.6338582677165354,
						"pct_stereotype_stderr,none": 0.021395218002640975
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.467977901836774,
						"likelihood_diff_stderr,none": 0.41756093801938654,
						"pct_stereotype,none": 0.7117117117117117,
						"pct_stereotype_stderr,none": 0.04318860867532052
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.250740379415532,
						"likelihood_diff_stderr,none": 0.4624374274101435,
						"pct_stereotype,none": 0.8494623655913979,
						"pct_stereotype_stderr,none": 0.03728212869390004
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.964149414865594,
						"likelihood_diff_stderr,none": 0.2888250878536635,
						"pct_stereotype,none": 0.7263157894736842,
						"pct_stereotype_stderr,none": 0.03243072906189839
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.685419388590786,
						"likelihood_diff_stderr,none": 0.1197603523248747,
						"pct_stereotype,none": 0.5205724508050089,
						"pct_stereotype_stderr,none": 0.012202956874643714
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.185000652737088,
						"likelihood_diff_stderr,none": 0.5570851429106586,
						"pct_stereotype,none": 0.5111111111111111,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.604593717134916,
						"likelihood_diff_stderr,none": 1.010792669095801,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.590537735910127,
						"likelihood_diff_stderr,none": 0.6032956432563803,
						"pct_stereotype,none": 0.6818181818181818,
						"pct_stereotype_stderr,none": 0.057771719027476576
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 4.326776760018132,
						"likelihood_diff_stderr,none": 0.24224297198072423,
						"pct_stereotype,none": 0.5077881619937694,
						"pct_stereotype_stderr,none": 0.02794745876935634
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 5.112244511781474,
						"likelihood_diff_stderr,none": 0.3158921225152337,
						"pct_stereotype,none": 0.383399209486166,
						"pct_stereotype_stderr,none": 0.030628616122857784
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.9224590725368924,
						"likelihood_diff_stderr,none": 0.457093252573342,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.7455005562823755,
						"likelihood_diff_stderr,none": 0.24087972130069762,
						"pct_stereotype,none": 0.4782608695652174,
						"pct_stereotype_stderr,none": 0.023315932363473738
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.60599773241126,
						"likelihood_diff_stderr,none": 0.3299566111671841,
						"pct_stereotype,none": 0.5043478260869565,
						"pct_stereotype_stderr,none": 0.04682752006203916
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 5.098599989335615,
						"likelihood_diff_stderr,none": 0.4065199601405249,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355001
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.311142940910495,
						"likelihood_diff_stderr,none": 0.43270114350722544,
						"pct_stereotype,none": 0.6071428571428571,
						"pct_stereotype_stderr,none": 0.03497401292852224
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.061515748031496065,
						"exact_match_stderr,none": 0.005331527918306684
					},
					"glue": {
						"acc,none": 0.5822142909581908,
						"acc_stderr,none": 0.032790502714334625,
						"alias": "glue",
						"f1,none": 0.4411327873267142,
						"f1_stderr,none": 0.0012831556331226797,
						"mcc,none": 0.1275893167139847,
						"mcc_stderr,none": 0.0010517992291010554
					},
					"hellaswag": {
						"acc,none": 0.6602270464050985,
						"acc_norm,none": 0.8366859191396137,
						"acc_norm_stderr,none": 0.003688965231733516,
						"acc_stderr,none": 0.004726640532562062,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.34704013860814326,
						"acc_norm,none": 0.34704013860814326,
						"acc_norm_stderr,none": 0.07238936581650249,
						"acc_stderr,none": 0.07238936581650249,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905694,
						"acc_stderr,none": 0.014356395999905694,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664377,
						"acc_stderr,none": 0.014566646394664377,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535269,
						"acc_stderr,none": 0.014539683710535269,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.319,
						"acc_norm,none": 0.319,
						"acc_norm_stderr,none": 0.014746404865473487,
						"acc_stderr,none": 0.014746404865473487,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.29833333333333334,
						"acc_norm,none": 0.29833333333333334,
						"acc_norm_stderr,none": 0.018694028559022177,
						"acc_stderr,none": 0.018694028559022177,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.352,
						"acc_norm,none": 0.352,
						"acc_norm_stderr,none": 0.015110404505648671,
						"acc_stderr,none": 0.015110404505648671,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.592,
						"acc_norm,none": 0.592,
						"acc_norm_stderr,none": 0.015549205052920676,
						"acc_stderr,none": 0.015549205052920676,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.295,
						"acc_norm,none": 0.295,
						"acc_norm_stderr,none": 0.01442855443844551,
						"acc_stderr,none": 0.01442855443844551,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.031652557907861936,
						"acc_stderr,none": 0.031652557907861936,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.373,
						"acc_norm,none": 0.373,
						"acc_norm_stderr,none": 0.015300493622922809,
						"acc_stderr,none": 0.015300493622922809,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3384615384615385,
						"acc_norm,none": 0.3384615384615385,
						"acc_norm_stderr,none": 0.041661735408389584,
						"acc_stderr,none": 0.041661735408389584,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.45,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.05,
						"acc_stderr,none": 0.05,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738854,
						"acc_stderr,none": 0.014806864733738854,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.435,
						"acc_norm,none": 0.435,
						"acc_norm_stderr,none": 0.015685057252717193,
						"acc_stderr,none": 0.015685057252717193,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.277,
						"acc_norm,none": 0.277,
						"acc_norm_stderr,none": 0.014158794845306265,
						"acc_stderr,none": 0.014158794845306265,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905687,
						"acc_stderr,none": 0.014356395999905687,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.331,
						"acc_norm,none": 0.331,
						"acc_norm_stderr,none": 0.014888272588203922,
						"acc_stderr,none": 0.014888272588203922,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996669,
						"acc_stderr,none": 0.014782913600996669,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091501,
						"acc_stderr,none": 0.014205696104091501,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.014853842487270336,
						"acc_stderr,none": 0.014853842487270336,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.36,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.048241815132442176,
						"acc_stderr,none": 0.048241815132442176,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.36,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.015186527932040115,
						"acc_stderr,none": 0.015186527932040115,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.537,
						"acc_norm,none": 0.537,
						"acc_norm_stderr,none": 0.01577592722726242,
						"acc_stderr,none": 0.01577592722726242,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.379,
						"acc_norm,none": 0.379,
						"acc_norm_stderr,none": 0.015349091002225349,
						"acc_stderr,none": 0.015349091002225349,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.01494414023379502,
						"acc_stderr,none": 0.01494414023379502,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.317,
						"acc_norm,none": 0.317,
						"acc_norm_stderr,none": 0.014721675438880226,
						"acc_stderr,none": 0.014721675438880226,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.391,
						"acc_norm,none": 0.391,
						"acc_norm_stderr,none": 0.015438826294681783,
						"acc_stderr,none": 0.015438826294681783,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.355,
						"acc_norm,none": 0.355,
						"acc_norm_stderr,none": 0.019551524326912272,
						"acc_stderr,none": 0.019551524326912272,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.512,
						"acc_norm,none": 0.512,
						"acc_norm_stderr,none": 0.015814743314581818,
						"acc_stderr,none": 0.015814743314581818,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.297,
						"acc_norm,none": 0.297,
						"acc_norm_stderr,none": 0.014456832294801103,
						"acc_stderr,none": 0.014456832294801103,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.288,
						"acc_norm,none": 0.288,
						"acc_norm_stderr,none": 0.01432694179723156,
						"acc_stderr,none": 0.01432694179723156,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361427,
						"acc_stderr,none": 0.014498627873361427,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.04648231987117316,
						"acc_stderr,none": 0.04648231987117316,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.38666666666666666,
						"acc_norm,none": 0.38666666666666666,
						"acc_norm_stderr,none": 0.02816313890819685,
						"acc_stderr,none": 0.02816313890819685,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.334,
						"acc_norm,none": 0.334,
						"acc_norm_stderr,none": 0.014922019523732958,
						"acc_stderr,none": 0.014922019523732958,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462623,
						"acc_stderr,none": 0.014078856992462623,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.345,
						"acc_norm,none": 0.345,
						"acc_norm_stderr,none": 0.03369796379336736,
						"acc_stderr,none": 0.03369796379336736,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.272,
						"acc_norm,none": 0.272,
						"acc_norm_stderr,none": 0.014078856992462618,
						"acc_stderr,none": 0.014078856992462618,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.364,
						"acc_norm,none": 0.364,
						"acc_norm_stderr,none": 0.015222868840522019,
						"acc_stderr,none": 0.015222868840522019,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.36,
						"acc_norm,none": 0.36,
						"acc_norm_stderr,none": 0.03402629784040015,
						"acc_stderr,none": 0.03402629784040015,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.469,
						"acc_norm,none": 0.469,
						"acc_norm_stderr,none": 0.015788865959538996,
						"acc_stderr,none": 0.015788865959538996,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.6033764525323394,
						"acc_norm,none": 0.496,
						"acc_norm_stderr,none": 0.0005009699398797607,
						"acc_stderr,none": 0.10436362536496387,
						"alias": "kobest",
						"f1,none": 0.5553821902279571,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.811965811965812,
						"acc_stderr,none": 0.010431780632246387,
						"alias": " - kobest_boolq",
						"f1,none": 0.8116217798594848,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936531,
						"alias": " - kobest_copa",
						"f1,none": 0.5854574873102816,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.404,
						"acc_norm,none": 0.496,
						"acc_norm_stderr,none": 0.02238235778196213,
						"acc_stderr,none": 0.02196663529383292,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.401473434891252,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5289672544080605,
						"acc_stderr,none": 0.02508374348663252,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4376216773098804,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4865079365079365,
						"acc_stderr,none": 0.014086365971849188,
						"alias": " - kobest_wic",
						"f1,none": 0.3441676032214804,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6832912866291481,
						"acc_stderr,none": 0.01690798528858505,
						"alias": "lambada",
						"perplexity,none": 3.786399904636776,
						"perplexity_stderr,none": 0.21254011171654186
					},
					"lambada_cloze": {
						"acc,none": 0.2092955559868038,
						"acc_stderr,none": 0.048106457496816746,
						"alias": "lambada_cloze",
						"perplexity,none": 79.69768925561641,
						"perplexity_stderr,none": 21.41458436331952
					},
					"lambada_openai": {
						"acc,none": 0.7145352222006598,
						"acc_stderr,none": 0.006292165813769896,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3977925600261814,
						"perplexity_stderr,none": 0.07203596942011029
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.30487094896176986,
						"acc_stderr,none": 0.006413613926848414,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 37.327498045923896,
						"perplexity_stderr,none": 1.1669176082652388
					},
					"lambada_standard": {
						"acc,none": 0.6520473510576363,
						"acc_stderr,none": 0.006636081541776578,
						"alias": " - lambada_standard",
						"perplexity,none": 4.17500724924737,
						"perplexity_stderr,none": 0.09820231784231818
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.11372016301183777,
						"acc_stderr,none": 0.004422992919917964,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 122.06788046530893,
						"perplexity_stderr,none": 4.255042040526601
					},
					"logiqa": {
						"acc,none": 0.2964669738863287,
						"acc_norm,none": 0.30721966205837176,
						"acc_norm_stderr,none": 0.01809529226082822,
						"acc_stderr,none": 0.017913222760382753,
						"alias": "logiqa"
					},
					"logiqa2": {
						"acc,none": 0.3505089058524173,
						"acc_norm,none": 0.3435114503816794,
						"acc_norm_stderr,none": 0.011981083483986733,
						"acc_stderr,none": 0.01203782529856954,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.37051926298157456,
						"acc_norm,none": 0.3688442211055276,
						"acc_norm_stderr,none": 0.008832636623685441,
						"acc_stderr,none": 0.008840914868809937,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.7994069053166702,
						"acc_stderr,none": 0.004121287749681853,
						"alias": "mc_taco",
						"f1,none": 0.734733893557423,
						"f1_stderr,none": 0.005895264085796533
					},
					"medmcqa": {
						"acc,none": 0.46282572316519244,
						"acc_norm,none": 0.46282572316519244,
						"acc_norm_stderr,none": 0.007710354282495721,
						"acc_stderr,none": 0.007710354282495721,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.5043205027494109,
						"acc_norm,none": 0.5043205027494109,
						"acc_norm_stderr,none": 0.014018780453018352,
						"acc_stderr,none": 0.014018780453018352,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.5901580971371599,
						"acc_stderr,none": 0.1308916942503901,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5777777777777777,
						"acc_stderr,none": 0.04266763404099582,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.039397364351956274,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.59,
						"acc_stderr,none": 0.04943110704237101,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.660377358490566,
						"acc_stderr,none": 0.029146904747798335,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6527777777777778,
						"acc_stderr,none": 0.03981240543717861,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252605,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5664739884393064,
						"acc_stderr,none": 0.03778621079092056,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.39215686274509803,
						"acc_stderr,none": 0.048580835742663434,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.67,
						"acc_stderr,none": 0.047258156262526066,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.5106382978723404,
						"acc_stderr,none": 0.03267862331014063,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.4824561403508772,
						"acc_stderr,none": 0.04700708033551038,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5586206896551724,
						"acc_stderr,none": 0.04137931034482757,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.42328042328042326,
						"acc_stderr,none": 0.02544636563440678,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04285714285714281,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.6935483870967742,
						"acc_stderr,none": 0.026226485652553887,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.5073891625615764,
						"acc_stderr,none": 0.035176035403610105,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7272727272727273,
						"acc_stderr,none": 0.0347769116216366,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7474747474747475,
						"acc_stderr,none": 0.030954055470365907,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.8186528497409327,
						"acc_stderr,none": 0.027807032360686088,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5769230769230769,
						"acc_stderr,none": 0.025049197876042338,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.34444444444444444,
						"acc_stderr,none": 0.02897264888484427,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.6596638655462185,
						"acc_stderr,none": 0.030778057422931673,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.3708609271523179,
						"acc_stderr,none": 0.03943966699183629,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7853211009174312,
						"acc_stderr,none": 0.01760430414925649,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.4537037037037037,
						"acc_stderr,none": 0.033953227263757976,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7843137254901961,
						"acc_stderr,none": 0.028867431449849303,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7805907172995781,
						"acc_stderr,none": 0.026939106581553945,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6278026905829597,
						"acc_stderr,none": 0.03244305283008732,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.6946564885496184,
						"acc_stderr,none": 0.04039314978724562,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5385759829968119,
						"acc_stderr,none": 0.14701376176218026,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.7603305785123967,
						"acc_stderr,none": 0.03896878985070417,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7129629629629629,
						"acc_stderr,none": 0.043733130409147614,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.7607361963190185,
						"acc_stderr,none": 0.033519538795212696,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.4642857142857143,
						"acc_stderr,none": 0.04733667890053756,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.7378640776699029,
						"acc_stderr,none": 0.043546310772605956,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8717948717948718,
						"acc_stderr,none": 0.02190190511507332,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7879948914431673,
						"acc_stderr,none": 0.014616099385833711,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6589595375722543,
						"acc_stderr,none": 0.025522474632121612,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.33631284916201115,
						"acc_stderr,none": 0.015801003729145904,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.673202614379085,
						"acc_stderr,none": 0.026857294663281416,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.664628258770518,
						"acc_stderr,none": 0.1089782488947092,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.6463022508038585,
						"acc_stderr,none": 0.027155208103200882,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.6697530864197531,
						"acc_stderr,none": 0.026168298456732846,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.45390070921985815,
						"acc_stderr,none": 0.02970045324729147,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.42046936114732725,
						"acc_stderr,none": 0.012607654553832705,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.6654411764705882,
						"acc_stderr,none": 0.028661996202335317,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.5898692810457516,
						"acc_stderr,none": 0.019898412717635892,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6727272727272727,
						"acc_stderr,none": 0.04494290866252088,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.7061224489795919,
						"acc_stderr,none": 0.029162738410249755,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6889827754306143,
						"acc_stderr,none": 0.08980026474895134,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.8407960199004975,
						"acc_stderr,none": 0.02587064676616914,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.497304154773232,
						"acc_stderr,none": 0.10643423341212979,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.03882310850890594,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.8128654970760234,
						"acc_stderr,none": 0.029913127232368032,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.5529291900152827,
						"acc_stderr,none": 0.005018800001869641,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.5537021969080553,
						"acc_stderr,none": 0.0050136227202211975,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7328431372549019,
						"acc_stderr,none": 0.021932668544150206,
						"alias": "mrpc",
						"f1,none": 0.8310077519379845,
						"f1_stderr,none": 0.01593988577529229
					},
					"multimedqa": {
						"acc,none": 0.5175301632363378,
						"acc_norm,none": 0.4778006153543328,
						"acc_norm_stderr,none": 0.0001606261257056986,
						"acc_stderr,none": 0.0694061118891545,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.33745874587458746,
						"acc_stderr,none": 0.006791728192424027,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7563017318130347,
						"mrr_stderr,none": 0.010009184048616275,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.39954853273137697,
						"r@2_stderr,none": 0.01646463433752643
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6923438690720508,
						"mrr_stderr,none": 0.01046550898169533,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750955,
						"r@2,none": 0.4401805869074492,
						"r@2_stderr,none": 0.016686597274671543
					},
					"openbookqa": {
						"acc,none": 0.352,
						"acc_norm,none": 0.454,
						"acc_norm_stderr,none": 0.02228814759117695,
						"acc_stderr,none": 0.021380042385946048,
						"alias": "openbookqa"
					},
					"piqa": {
						"acc,none": 0.8025027203482046,
						"acc_norm,none": 0.8068552774755169,
						"acc_norm_stderr,none": 0.009210530962579788,
						"acc_stderr,none": 0.009288578108523272,
						"alias": "piqa"
					},
					"prost": {
						"acc,none": 0.4664282664389411,
						"acc_norm,none": 0.43824722459436377,
						"acc_norm_stderr,none": 0.0036249778054749677,
						"acc_stderr,none": 0.003644701699456615,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.019173085678337157,
						"alias": "pubmedqa"
					},
					"qa4mre": {
						"acc,none": 0.5460992907801419,
						"acc_norm,none": 0.5815602836879432,
						"acc_norm_stderr,none": 0.07866017109546872,
						"acc_stderr,none": 0.06008175299623157,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.6583333333333333,
						"acc_norm,none": 0.7333333333333333,
						"acc_norm_stderr,none": 0.040537932807004046,
						"acc_stderr,none": 0.04347611684317006,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.55,
						"acc_norm,none": 0.61875,
						"acc_norm_stderr,none": 0.03851802138867096,
						"acc_stderr,none": 0.03945381823835186,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4964788732394366,
						"acc_norm,none": 0.4964788732394366,
						"acc_norm_stderr,none": 0.029721177900313853,
						"acc_stderr,none": 0.02972117790031384,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5842943437671609,
						"acc_stderr,none": 0.0066685737728695215,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5877813504823152,
						"acc_stderr,none": 0.00244807782265664,
						"alias": "qqp",
						"f1,none": 0.4384014018061733,
						"f1_stderr,none": 0.0036014830728846657
					},
					"race": {
						"acc,none": 0.4583732057416268,
						"acc_stderr,none": 0.015420889760190567,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.7003610108303249,
						"acc_stderr,none": 0.02757437014529261,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.954,
						"acc_norm,none": 0.905,
						"acc_norm_stderr,none": 0.009276910103103298,
						"acc_stderr,none": 0.006627814717380709,
						"alias": "sciq"
					},
					"sglue_rte": {
						"acc,none": 0.7003610108303249,
						"acc_stderr,none": 0.02757437014529261,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8555045871559633,
						"acc_stderr,none": 0.01191321895589123,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5974707587723683,
						"acc_norm,none": 0.7870638808357493,
						"acc_norm_stderr,none": 0.0028944122046582997,
						"acc_stderr,none": 0.0034672708384908342,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.8909853249475891,
						"acc_stderr,none": 0.06497187134187173,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.9704527243589743,
						"acc_stderr,none": 0.0016947879911929757,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.9812506334245464,
						"acc_stderr,none": 0.001365566076894862,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.7258823529411764,
						"acc_stderr,none": 0.00441695343620367,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.061515748031496065,
						"exact_match_stderr,none": 0.005331527918306684
					},
					"wic": {
						"acc,none": 0.6003134796238244,
						"acc_stderr,none": 0.019407923975502145,
						"alias": "wic"
					},
					"winogrande": {
						"acc,none": 0.7371744277821626,
						"acc_stderr,none": 0.012370922527262008,
						"alias": "winogrande"
					},
					"wnli": {
						"acc,none": 0.6056338028169014,
						"acc_stderr,none": 0.05841251085444426,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.047447333932779195,
						"alias": "wsc"
					},
					"wsc273": {
						"acc,none": 0.8827838827838828,
						"acc_stderr,none": 0.01950457139863538,
						"alias": "wsc273"
					}
				}
			}
		},
		"name": "mistralai/Mistral-7B-Instruct-v0.2"
	},
	"mistralai/Mistral-7B-v0.1": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.7065952649379932,
						"acc_norm,none": 0.7130777903043969,
						"acc_norm_stderr,none": 0.08088060213614173,
						"acc_stderr,none": 0.09736868212773775,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3803125,
						"acc_stderr,none": 0.015133650384246593,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.90035,
						"acc_stderr,none": 0.10511691499262178,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8289253731343283,
						"acc_stderr,none": 0.1651542013152315,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.40713224368499257,
						"acc_norm,none": 0.40713224368499257,
						"acc_norm_stderr,none": 0.14694388399894986,
						"acc_stderr,none": 0.14694388399894986,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.3979450872042825,
						"acc_norm,none": 0.3979450872042825,
						"acc_norm_stderr,none": 0.09095017592357733,
						"acc_stderr,none": 0.09095017592357733,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.039303237651172,
						"likelihood_diff_stderr,none": 0.4554088930609172,
						"pct_stereotype,none": 0.5945140131186643,
						"pct_stereotype_stderr,none": 0.08619149455497793
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15403543307086615,
						"exact_match_stderr,none": 0.008009980186286517
					},
					"glue": {
						"acc,none": 0.5149773701762745,
						"acc_stderr,none": 0.0011586493887209115,
						"alias": "glue",
						"f1,none": 0.3490504972495486,
						"f1_stderr,none": 0.0013384442216884647,
						"mcc,none": -0.04847021005996873,
						"mcc_stderr,none": 0.030783455837743674
					},
					"kmmlu": {
						"acc,none": 0.33245740687265374,
						"acc_norm,none": 0.33245740687265374,
						"acc_norm_stderr,none": 0.047791173115529544,
						"acc_stderr,none": 0.047791173115529544,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5507564130673098,
						"acc_norm,none": 0.518,
						"acc_norm_stderr,none": 0.0005003527054108269,
						"acc_stderr,none": 0.05115523112210208,
						"alias": "kobest",
						"f1,none": 0.4791948665252917,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7254997089074325,
						"acc_stderr,none": 0.01676361499326025,
						"alias": "lambada",
						"perplexity,none": 3.477999261370286,
						"perplexity_stderr,none": 0.16329169211004324
					},
					"lambada_cloze": {
						"acc,none": 0.08354356685425965,
						"acc_stderr,none": 0.017359537873426072,
						"alias": "lambada_cloze",
						"perplexity,none": 92.9621493847383,
						"perplexity_stderr,none": 4.749192287461508
					},
					"lambada_multilingual": {
						"acc,none": 0.5186493304870949,
						"acc_stderr,none": 0.09575650539776685,
						"alias": "lambada_multilingual",
						"perplexity,none": 27.056756454439657,
						"perplexity_stderr,none": 10.55570565115782
					},
					"mmlu": {
						"acc,none": 0.596425010682239,
						"acc_stderr,none": 0.13844007438448744,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.5343251859723698,
						"acc_stderr,none": 0.16186401202777495,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.6816865143224976,
						"acc_stderr,none": 0.09811125388055714,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6958076048098798,
						"acc_stderr,none": 0.08956129050566648,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.5080875356803045,
						"acc_stderr,none": 0.1215476495130753,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.5325762952448545,
						"acc_norm,none": 0.4918553467234212,
						"acc_norm_stderr,none": 0.0001327699679693839,
						"acc_stderr,none": 0.06628005425577563,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4147857142857143,
						"acc_stderr,none": 0.057291271279150895,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7809920153879631,
						"acc_norm,none": 0.7148213731718109,
						"acc_norm_stderr,none": 0.0082806592848003,
						"acc_stderr,none": 0.15963097565490927,
						"alias": "pythia",
						"bits_per_byte,none": 0.7790910200090022,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.7160493234446637,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.180029723439131,
						"perplexity_stderr,none": 0.05830830767398106,
						"word_perplexity,none": 17.952971816297133,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4875886524822695,
						"acc_norm,none": 0.5372340425531915,
						"acc_norm_stderr,none": 0.07588716246805031,
						"acc_stderr,none": 0.04415178467483081,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.803766929553093,
						"acc_stderr,none": 0.08597393979979331,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.35372429452780935,
						"acc_stderr,none": 0.0015289998228574559,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.41370869033047736,
						"bleu_acc_stderr,none": 0.0172408618120998,
						"bleu_diff,none": -1.4792658300937298,
						"bleu_diff_stderr,none": 1.0365996246291096,
						"bleu_max,none": 30.506553066296135,
						"bleu_max_stderr,none": 0.8545048499620589,
						"rouge1_acc,none": 0.397796817625459,
						"rouge1_acc_stderr,none": 0.017133934248559652,
						"rouge1_diff,none": -1.916659079286129,
						"rouge1_diff_stderr,none": 1.2644440329641677,
						"rouge1_max,none": 55.752927992652076,
						"rouge1_max_stderr,none": 0.9304517781051922,
						"rouge2_acc,none": 0.3537331701346389,
						"rouge2_acc_stderr,none": 0.016737814358846147,
						"rouge2_diff,none": -2.594294632926603,
						"rouge2_diff_stderr,none": 1.4182491490828897,
						"rouge2_max,none": 40.96291082037781,
						"rouge2_max_stderr,none": 1.1007195429525476,
						"rougeL_acc,none": 0.40269277845777235,
						"rougeL_acc_stderr,none": 0.017168830935187215,
						"rougeL_diff,none": -2.232349573503825,
						"rougeL_diff_stderr,none": 1.2801381361084667,
						"rougeL_max,none": 53.07319351290984,
						"rougeL_max_stderr,none": 0.9465416042598798
					},
					"xcopa": {
						"acc,none": 0.5594545454545454,
						"acc_stderr,none": 0.05387910421610255,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.05866526647573086,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5916611515552614,
						"acc_stderr,none": 0.07661315123253779,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8120926050797932,
						"acc_stderr,none": 0.04148122639462828,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.7065952649379932,
						"acc_norm,none": 0.7130777903043969,
						"acc_norm_stderr,none": 0.08088060213614173,
						"acc_stderr,none": 0.09736868212773775,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3803125,
						"acc_stderr,none": 0.015133650384246593,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.01533317012577988,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.015325105508898125,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.38666666666666666,
						"acc_stderr,none": 0.014063941778353468,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.5008532423208191,
						"acc_norm,none": 0.5426621160409556,
						"acc_norm_stderr,none": 0.014558106543924068,
						"acc_stderr,none": 0.014611369529813262,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.8080808080808081,
						"acc_norm,none": 0.7971380471380471,
						"acc_norm_stderr,none": 0.008251544823606903,
						"acc_stderr,none": 0.008080808080807977,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.90035,
						"acc_stderr,none": 0.10511691499262178,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.01070931112034454,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.9985,
						"acc_stderr,none": 0.0008655920660521429,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.010159286665547608,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.9985,
						"acc_stderr,none": 0.000865592066052145,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.002891311093590575,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.9885,
						"acc_stderr,none": 0.0023846841214675827,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.0046854003551718435,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.9455,
						"acc_stderr,none": 0.005077180702116209,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.0063686560505294655,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.8715,
						"acc_stderr,none": 0.0074847769467748975,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.01735357917570499,
						"acc_stderr,none": 0.002720520054825065,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8289253731343283,
						"acc_stderr,none": 0.1651542013152315,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662753,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676753,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074118,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653886,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555965,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.573,
						"acc_stderr,none": 0.01564978964446222,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786548,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936701,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469276,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.00314800093867677,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165549,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.00648892179842742,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584937,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832013,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557415,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611458,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024064,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274534,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.013644675781314121,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.811,
						"acc_stderr,none": 0.01238678458811771,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639237,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499368,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.0035838308894036337,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.178,
						"acc_stderr,none": 0.012102167676183587,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787733,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386675,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316405,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786555,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767676,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.00843458014024065,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333452,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.549,
						"acc_stderr,none": 0.01574315237958554,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323508,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.015752210388771847,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.629,
						"acc_stderr,none": 0.015283736211823188,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142655,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.013626065817750641,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662742,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478322,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.01236158601510377,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474927,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000132,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.01067487484483796,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.015240612726405756,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165545,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946088,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.01368160027870233,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302702,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061215,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.963,
						"acc_stderr,none": 0.005972157622389623,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.015537226438634595,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.01009340759490462,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823335,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234195,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235247,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656796,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099193,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.364,
						"acc_stderr,none": 0.01522286884052202,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.329,
						"acc_stderr,none": 0.014865395385928355,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.8363914373088684,
						"acc_stderr,none": 0.006469941343840766,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.06737697508644648,
						"alias": "cb",
						"f1,none": 0.28777777777777774,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.40713224368499257,
						"acc_norm,none": 0.40713224368499257,
						"acc_norm_stderr,none": 0.14694388399894986,
						"acc_stderr,none": 0.14694388399894986,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.066522473522476,
						"acc_stderr,none": 0.066522473522476,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122594,
						"acc_stderr,none": 0.08503766788122594,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3404255319148936,
						"acc_norm,none": 0.3404255319148936,
						"acc_norm_stderr,none": 0.06986570800554745,
						"acc_stderr,none": 0.06986570800554745,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.06546202725664503,
						"acc_stderr,none": 0.06546202725664503,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.5675675675675675,
						"acc_norm,none": 0.5675675675675675,
						"acc_norm_stderr,none": 0.08256893144064577,
						"acc_stderr,none": 0.08256893144064577,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.5238095238095238,
						"acc_norm,none": 0.5238095238095238,
						"acc_norm_stderr,none": 0.11167656571008164,
						"acc_stderr,none": 0.11167656571008164,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.41935483870967744,
						"acc_norm,none": 0.41935483870967744,
						"acc_norm_stderr,none": 0.0900918712501222,
						"acc_stderr,none": 0.0900918712501222,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.5161290322580645,
						"acc_norm,none": 0.5161290322580645,
						"acc_norm_stderr,none": 0.09123958466923197,
						"acc_stderr,none": 0.09123958466923197,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.7,
						"acc_norm,none": 0.7,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.05555555555555555,
						"acc_norm,none": 0.05555555555555555,
						"acc_norm_stderr,none": 0.05555555555555556,
						"acc_stderr,none": 0.05555555555555556,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522563,
						"acc_stderr,none": 0.11369720523522563,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4583333333333333,
						"acc_norm,none": 0.4583333333333333,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.5416666666666666,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.10389457216622949,
						"acc_stderr,none": 0.10389457216622949,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.6666666666666666,
						"acc_norm,none": 0.6666666666666666,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.14213381090374033,
						"acc_stderr,none": 0.14213381090374033,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.47619047619047616,
						"acc_norm,none": 0.47619047619047616,
						"acc_norm_stderr,none": 0.11167656571008164,
						"acc_stderr,none": 0.11167656571008164,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.43478260869565216,
						"acc_norm,none": 0.43478260869565216,
						"acc_norm_stderr,none": 0.10568965974008647,
						"acc_stderr,none": 0.10568965974008647,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.40816326530612246,
						"acc_norm,none": 0.40816326530612246,
						"acc_norm_stderr,none": 0.07094099868916398,
						"acc_stderr,none": 0.07094099868916398,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.10865714630312667,
						"acc_stderr,none": 0.10865714630312667,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633637,
						"acc_stderr,none": 0.09169709590633637,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5681818181818182,
						"acc_norm,none": 0.5681818181818182,
						"acc_norm_stderr,none": 0.07553702921752882,
						"acc_stderr,none": 0.07553702921752882,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.5217391304347826,
						"acc_norm,none": 0.5217391304347826,
						"acc_norm_stderr,none": 0.07446511639805872,
						"acc_stderr,none": 0.07446511639805872,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.5217391304347826,
						"acc_norm,none": 0.5217391304347826,
						"acc_norm_stderr,none": 0.10649955403405124,
						"acc_stderr,none": 0.10649955403405124,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.3979450872042825,
						"acc_norm,none": 0.3979450872042825,
						"acc_norm_stderr,none": 0.09095017592357733,
						"acc_stderr,none": 0.09095017592357733,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.35502958579881655,
						"acc_norm,none": 0.35502958579881655,
						"acc_norm_stderr,none": 0.036918795945769134,
						"acc_stderr,none": 0.036918795945769134,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.03662869876642905,
						"acc_stderr,none": 0.03662869876642905,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.39375,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03874695666685831,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.4354066985645933,
						"acc_norm,none": 0.4354066985645933,
						"acc_norm_stderr,none": 0.03437824847655481,
						"acc_stderr,none": 0.03437824847655481,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.3625,
						"acc_norm_stderr,none": 0.038123743406448904,
						"acc_stderr,none": 0.038123743406448904,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.5114503816793893,
						"acc_norm,none": 0.5114503816793893,
						"acc_norm_stderr,none": 0.04384140024078016,
						"acc_stderr,none": 0.04384140024078016,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.041129758751770655,
						"acc_stderr,none": 0.041129758751770655,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.45794392523364486,
						"acc_norm,none": 0.45794392523364486,
						"acc_norm_stderr,none": 0.04839219555189162,
						"acc_stderr,none": 0.04839219555189162,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.4086687306501548,
						"acc_norm,none": 0.4086687306501548,
						"acc_norm_stderr,none": 0.027395118985328946,
						"acc_stderr,none": 0.027395118985328946,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.35294117647058826,
						"acc_norm,none": 0.35294117647058826,
						"acc_norm_stderr,none": 0.03354092437591519,
						"acc_stderr,none": 0.03354092437591519,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.46368715083798884,
						"acc_norm,none": 0.46368715083798884,
						"acc_norm_stderr,none": 0.03737761880538031,
						"acc_stderr,none": 0.03737761880538031,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.31223628691983124,
						"acc_norm,none": 0.31223628691983124,
						"acc_norm_stderr,none": 0.030165137867847008,
						"acc_stderr,none": 0.030165137867847008,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.4672897196261682,
						"acc_norm,none": 0.4672897196261682,
						"acc_norm_stderr,none": 0.04846025774523467,
						"acc_stderr,none": 0.04846025774523467,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.37735849056603776,
						"acc_norm,none": 0.37735849056603776,
						"acc_norm_stderr,none": 0.04730439022852895,
						"acc_stderr,none": 0.04730439022852895,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.37037037037037035,
						"acc_norm,none": 0.37037037037037035,
						"acc_norm_stderr,none": 0.04668408033024931,
						"acc_stderr,none": 0.04668408033024931,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.44339622641509435,
						"acc_norm,none": 0.44339622641509435,
						"acc_norm_stderr,none": 0.0484813182297548,
						"acc_stderr,none": 0.0484813182297548,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.027984879811884515,
						"acc_stderr,none": 0.027984879811884515,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.47058823529411764,
						"acc_norm,none": 0.47058823529411764,
						"acc_norm_stderr,none": 0.03503235296367994,
						"acc_stderr,none": 0.03503235296367994,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.4853801169590643,
						"acc_norm,none": 0.4853801169590643,
						"acc_norm_stderr,none": 0.038331852752130205,
						"acc_stderr,none": 0.038331852752130205,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3945578231292517,
						"acc_norm,none": 0.3945578231292517,
						"acc_norm_stderr,none": 0.040449693713112876,
						"acc_stderr,none": 0.040449693713112876,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.41007194244604317,
						"acc_norm,none": 0.41007194244604317,
						"acc_norm_stderr,none": 0.04186875148834218,
						"acc_stderr,none": 0.04186875148834218,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.5220125786163522,
						"acc_norm,none": 0.5220125786163522,
						"acc_norm_stderr,none": 0.03973929649561243,
						"acc_stderr,none": 0.03973929649561243,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.4723926380368098,
						"acc_norm,none": 0.4723926380368098,
						"acc_norm_stderr,none": 0.03922378290610991,
						"acc_stderr,none": 0.03922378290610991,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.37790697674418605,
						"acc_norm,none": 0.37790697674418605,
						"acc_norm_stderr,none": 0.03707849218723281,
						"acc_stderr,none": 0.03707849218723281,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.02747460833869741,
						"acc_stderr,none": 0.02747460833869741,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.3838383838383838,
						"acc_norm,none": 0.3838383838383838,
						"acc_norm_stderr,none": 0.03464881675016339,
						"acc_stderr,none": 0.03464881675016339,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.6260504201680672,
						"acc_norm,none": 0.6260504201680672,
						"acc_norm_stderr,none": 0.031429466378837076,
						"acc_stderr,none": 0.031429466378837076,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.3037037037037037,
						"acc_norm,none": 0.3037037037037037,
						"acc_norm_stderr,none": 0.039725528847851375,
						"acc_stderr,none": 0.039725528847851375,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.46853146853146854,
						"acc_norm,none": 0.46853146853146854,
						"acc_norm_stderr,none": 0.041875883974458995,
						"acc_stderr,none": 0.041875883974458995,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.32954545454545453,
						"acc_norm,none": 0.32954545454545453,
						"acc_norm_stderr,none": 0.035532299023675745,
						"acc_stderr,none": 0.035532299023675745,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.40268456375838924,
						"acc_norm,none": 0.40268456375838924,
						"acc_norm_stderr,none": 0.04031377823191209,
						"acc_stderr,none": 0.04031377823191209,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.3727810650887574,
						"acc_norm,none": 0.3727810650887574,
						"acc_norm_stderr,none": 0.03730627281928549,
						"acc_stderr,none": 0.03730627281928549,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.03986246938961656,
						"acc_stderr,none": 0.03986246938961656,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.4067796610169492,
						"acc_norm,none": 0.4067796610169492,
						"acc_norm_stderr,none": 0.045414517088615894,
						"acc_stderr,none": 0.045414517088615894,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.0344500028917346,
						"acc_stderr,none": 0.0344500028917346,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.04461272175910508,
						"acc_stderr,none": 0.04461272175910508,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3776223776223776,
						"acc_norm,none": 0.3776223776223776,
						"acc_norm_stderr,none": 0.040682878492098076,
						"acc_stderr,none": 0.040682878492098076,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.044444444444444495,
						"acc_stderr,none": 0.044444444444444495,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.32972972972972975,
						"acc_norm,none": 0.32972972972972975,
						"acc_norm_stderr,none": 0.03465733148032954,
						"acc_stderr,none": 0.03465733148032954,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.42441860465116277,
						"acc_norm,none": 0.42441860465116277,
						"acc_norm_stderr,none": 0.037796581784641,
						"acc_stderr,none": 0.037796581784641,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.39172749391727496,
						"acc_norm,none": 0.39172749391727496,
						"acc_norm_stderr,none": 0.024107334397898715,
						"acc_stderr,none": 0.024107334397898715,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.6588785046728972,
						"acc_norm,none": 0.6588785046728972,
						"acc_norm_stderr,none": 0.03248384363697549,
						"acc_stderr,none": 0.03248384363697549,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.3902439024390244,
						"acc_norm,none": 0.3902439024390244,
						"acc_norm_stderr,none": 0.04416377855732609,
						"acc_stderr,none": 0.04416377855732609,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.4262295081967213,
						"acc_norm,none": 0.4262295081967213,
						"acc_norm_stderr,none": 0.04495708831296081,
						"acc_stderr,none": 0.04495708831296081,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.4857142857142857,
						"acc_norm,none": 0.4857142857142857,
						"acc_norm_stderr,none": 0.034571603689472506,
						"acc_stderr,none": 0.034571603689472506,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.5277777777777778,
						"acc_norm,none": 0.5277777777777778,
						"acc_norm_stderr,none": 0.037314037607574575,
						"acc_stderr,none": 0.037314037607574575,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.5291005291005291,
						"acc_norm,none": 0.5291005291005291,
						"acc_norm_stderr,none": 0.036404433270336836,
						"acc_stderr,none": 0.036404433270336836,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3275862068965517,
						"acc_norm,none": 0.3275862068965517,
						"acc_norm_stderr,none": 0.04376552980994349,
						"acc_stderr,none": 0.04376552980994349,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.45517241379310347,
						"acc_norm,none": 0.45517241379310347,
						"acc_norm_stderr,none": 0.04149886942192118,
						"acc_stderr,none": 0.04149886942192118,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.41904761904761906,
						"acc_norm,none": 0.41904761904761906,
						"acc_norm_stderr,none": 0.0483821637528253,
						"acc_stderr,none": 0.0483821637528253,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.03751612367420645,
						"acc_stderr,none": 0.03751612367420645,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.32701421800947866,
						"acc_norm,none": 0.32701421800947866,
						"acc_norm_stderr,none": 0.03237252797910212,
						"acc_stderr,none": 0.03237252797910212,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.3271276595744681,
						"acc_norm,none": 0.3271276595744681,
						"acc_norm_stderr,none": 0.024227541017929646,
						"acc_stderr,none": 0.024227541017929646,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.4396551724137931,
						"acc_norm,none": 0.4396551724137931,
						"acc_norm_stderr,none": 0.03265711286547217,
						"acc_stderr,none": 0.03265711286547217,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.4885057471264368,
						"acc_norm,none": 0.4885057471264368,
						"acc_norm_stderr,none": 0.03800425000198232,
						"acc_stderr,none": 0.03800425000198232,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.4444444444444444,
						"acc_norm,none": 0.4444444444444444,
						"acc_norm_stderr,none": 0.04292596718256981,
						"acc_stderr,none": 0.04292596718256981,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.40707964601769914,
						"acc_norm,none": 0.40707964601769914,
						"acc_norm_stderr,none": 0.03275266284786317,
						"acc_stderr,none": 0.03275266284786317,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3878787878787879,
						"acc_norm,none": 0.3878787878787879,
						"acc_norm_stderr,none": 0.038049136539710114,
						"acc_stderr,none": 0.038049136539710114,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.47928994082840237,
						"acc_norm,none": 0.47928994082840237,
						"acc_norm_stderr,none": 0.03854273242663734,
						"acc_stderr,none": 0.03854273242663734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.453416149068323,
						"acc_norm,none": 0.453416149068323,
						"acc_norm_stderr,none": 0.03935653891289664,
						"acc_stderr,none": 0.03935653891289664,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.425,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.0392039498715957,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.04847021005996873,
						"mcc_stderr,none": 0.030783455837743674
					},
					"copa": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.0256432399976243,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.039303237651172,
						"likelihood_diff_stderr,none": 0.4554088930609172,
						"pct_stereotype,none": 0.5945140131186643,
						"pct_stereotype_stderr,none": 0.08619149455497793
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.172200931419414,
						"likelihood_diff_stderr,none": 0.09444066856881643,
						"pct_stereotype,none": 0.6583184257602862,
						"pct_stereotype_stderr,none": 0.0115848863578411
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.81760980794718,
						"likelihood_diff_stderr,none": 0.39572701594612664,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.04815143362682777
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 8.978192589499734,
						"likelihood_diff_stderr,none": 2.383266706466439,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726126
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.123607283372145,
						"likelihood_diff_stderr,none": 0.6269717764216254,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.593177890777588,
						"likelihood_diff_stderr,none": 0.1891835945318628,
						"pct_stereotype,none": 0.565625,
						"pct_stereotype_stderr,none": 0.02775245248136475
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.503406215597082,
						"likelihood_diff_stderr,none": 0.2223140250179109,
						"pct_stereotype,none": 0.5648148148148148,
						"pct_stereotype_stderr,none": 0.03381200005643525
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.352519141303168,
						"likelihood_diff_stderr,none": 0.429502683647509,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.192574343343419,
						"likelihood_diff_stderr,none": 0.18220038540379538,
						"pct_stereotype,none": 0.6220472440944882,
						"pct_stereotype_stderr,none": 0.02153408701954117
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.5383266930107595,
						"likelihood_diff_stderr,none": 0.34676935247597424,
						"pct_stereotype,none": 0.8198198198198198,
						"pct_stereotype_stderr,none": 0.03664513893725976
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.885875209685294,
						"likelihood_diff_stderr,none": 0.40901674280141453,
						"pct_stereotype,none": 0.8817204301075269,
						"pct_stereotype_stderr,none": 0.033668704543479824
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.445691620676141,
						"likelihood_diff_stderr,none": 0.25789243822181585,
						"pct_stereotype,none": 0.7052631578947368,
						"pct_stereotype_stderr,none": 0.03316361842984287
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.9064055438829306,
						"likelihood_diff_stderr,none": 0.09471848282380547,
						"pct_stereotype,none": 0.5307096004770423,
						"pct_stereotype_stderr,none": 0.012190241226841262
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.0736910078260635,
						"likelihood_diff_stderr,none": 0.3236843030438557,
						"pct_stereotype,none": 0.5222222222222223,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.385772411639874,
						"likelihood_diff_stderr,none": 0.9269473104034913,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.82528576706395,
						"likelihood_diff_stderr,none": 0.432435922942141,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.05966637484671758
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.4997918078461168,
						"likelihood_diff_stderr,none": 0.1914324136821373,
						"pct_stereotype,none": 0.5171339563862928,
						"pct_stereotype_stderr,none": 0.027934433698537306
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.111823522997468,
						"likelihood_diff_stderr,none": 0.2296054767953655,
						"pct_stereotype,none": 0.3675889328063241,
						"pct_stereotype_stderr,none": 0.030372509322709233
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6255602306789823,
						"likelihood_diff_stderr,none": 0.43425524662184634,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.05820650942569532
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.240377683224885,
						"likelihood_diff_stderr,none": 0.2098809816569495,
						"pct_stereotype,none": 0.4652173913043478,
						"pct_stereotype_stderr,none": 0.023281462893244318
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.6430626578952956,
						"likelihood_diff_stderr,none": 0.32613230548605726,
						"pct_stereotype,none": 0.7130434782608696,
						"pct_stereotype_stderr,none": 0.042365626207479204
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.6517183492471883,
						"likelihood_diff_stderr,none": 0.3279010024708823,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.047052133987784385
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.006779777760408,
						"likelihood_diff_stderr,none": 0.30335267862395343,
						"pct_stereotype,none": 0.6530612244897959,
						"pct_stereotype_stderr,none": 0.03408678678944596
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.15403543307086615,
						"exact_match_stderr,none": 0.008009980186286517
					},
					"glue": {
						"acc,none": 0.5149773701762745,
						"acc_stderr,none": 0.0011586493887209115,
						"alias": "glue",
						"f1,none": 0.3490504972495486,
						"f1_stderr,none": 0.0013384442216884647,
						"mcc,none": -0.04847021005996873,
						"mcc_stderr,none": 0.030783455837743674
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.38817285822592873,
						"exact_match_stderr,get-answer": 0.013423607564002757
					},
					"hellaswag": {
						"acc,none": 0.6131248755228043,
						"acc_norm,none": 0.8103963353913562,
						"acc_norm_stderr,none": 0.003911862797736132,
						"acc_stderr,none": 0.004860393011974709,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.33245740687265374,
						"acc_norm,none": 0.33245740687265374,
						"acc_norm_stderr,none": 0.047791173115529544,
						"acc_stderr,none": 0.047791173115529544,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.31,
						"acc_norm,none": 0.31,
						"acc_norm_stderr,none": 0.04648231987117316,
						"acc_stderr,none": 0.04648231987117316,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.286,
						"acc_norm,none": 0.286,
						"acc_norm_stderr,none": 0.014297146862517908,
						"acc_stderr,none": 0.014297146862517908,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.34,
						"acc_norm,none": 0.34,
						"acc_norm_stderr,none": 0.014987482264363935,
						"acc_stderr,none": 0.014987482264363935,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.259,
						"acc_norm,none": 0.259,
						"acc_norm_stderr,none": 0.013860415257527911,
						"acc_stderr,none": 0.013860415257527911,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.0146455963857227,
						"acc_stderr,none": 0.0146455963857227,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.019059698848626565,
						"acc_stderr,none": 0.019059698848626565,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.348,
						"acc_norm,none": 0.348,
						"acc_norm_stderr,none": 0.01507060460376841,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.015819299929208316,
						"acc_stderr,none": 0.015819299929208316,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.329,
						"acc_norm,none": 0.329,
						"acc_norm_stderr,none": 0.014865395385928362,
						"acc_stderr,none": 0.014865395385928362,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.03199992148231577,
						"acc_stderr,none": 0.03199992148231577,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.338,
						"acc_norm,none": 0.338,
						"acc_norm_stderr,none": 0.014965960710224487,
						"acc_stderr,none": 0.014965960710224487,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.27692307692307694,
						"acc_norm,none": 0.27692307692307694,
						"acc_norm_stderr,none": 0.039398253452664685,
						"acc_stderr,none": 0.039398253452664685,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.04688261722621504,
						"acc_stderr,none": 0.04688261722621504,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.348,
						"acc_norm,none": 0.348,
						"acc_norm_stderr,none": 0.015070604603768408,
						"acc_stderr,none": 0.015070604603768408,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.391,
						"acc_norm,none": 0.391,
						"acc_norm_stderr,none": 0.015438826294681782,
						"acc_stderr,none": 0.015438826294681782,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.014498627873361425,
						"acc_stderr,none": 0.014498627873361425,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.01480686473373886,
						"acc_stderr,none": 0.01480686473373886,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.329,
						"acc_norm,none": 0.329,
						"acc_norm_stderr,none": 0.014865395385928364,
						"acc_stderr,none": 0.014865395385928364,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722694,
						"acc_stderr,none": 0.014645596385722694,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.335,
						"acc_norm,none": 0.335,
						"acc_norm_stderr,none": 0.014933117490932568,
						"acc_stderr,none": 0.014933117490932568,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.359,
						"acc_norm,none": 0.359,
						"acc_norm_stderr,none": 0.015177264224798594,
						"acc_stderr,none": 0.015177264224798594,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.345,
						"acc_norm,none": 0.345,
						"acc_norm_stderr,none": 0.015039986742055235,
						"acc_stderr,none": 0.015039986742055235,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.437,
						"acc_norm,none": 0.437,
						"acc_norm_stderr,none": 0.015693223928730377,
						"acc_stderr,none": 0.015693223928730377,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.386,
						"acc_norm,none": 0.386,
						"acc_norm_stderr,none": 0.01540263747678438,
						"acc_stderr,none": 0.01540263747678438,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008218,
						"acc_stderr,none": 0.014414290540008218,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.351,
						"acc_norm,none": 0.351,
						"acc_norm_stderr,none": 0.015100563798316403,
						"acc_stderr,none": 0.015100563798316403,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664385,
						"acc_stderr,none": 0.014566646394664385,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.32166666666666666,
						"acc_norm,none": 0.32166666666666666,
						"acc_norm_stderr,none": 0.019085836431523086,
						"acc_stderr,none": 0.019085836431523086,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.346,
						"acc_norm,none": 0.346,
						"acc_norm_stderr,none": 0.015050266127564455,
						"acc_stderr,none": 0.015050266127564455,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950432,
						"acc_stderr,none": 0.014553205687950432,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.321,
						"acc_norm,none": 0.321,
						"acc_norm_stderr,none": 0.01477082181793464,
						"acc_stderr,none": 0.01477082181793464,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.014370995982377946,
						"acc_stderr,none": 0.014370995982377946,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768077,
						"acc_stderr,none": 0.04408440022768077,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.30666666666666664,
						"acc_norm,none": 0.30666666666666664,
						"acc_norm_stderr,none": 0.026666666666666658,
						"acc_stderr,none": 0.026666666666666658,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177462,
						"acc_stderr,none": 0.013569640199177462,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.335,
						"acc_norm,none": 0.335,
						"acc_norm_stderr,none": 0.014933117490932573,
						"acc_stderr,none": 0.014933117490932573,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535253,
						"acc_stderr,none": 0.014539683710535253,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.03263741725420571,
						"acc_stderr,none": 0.03263741725420571,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.01447084674113472,
						"acc_stderr,none": 0.01447084674113472,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.328,
						"acc_norm,none": 0.328,
						"acc_norm_stderr,none": 0.01485384248727033,
						"acc_stderr,none": 0.01485384248727033,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.03216633903375033,
						"acc_stderr,none": 0.03216633903375033,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.416,
						"acc_norm,none": 0.416,
						"acc_norm_stderr,none": 0.015594460144140601,
						"acc_stderr,none": 0.015594460144140601,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5507564130673098,
						"acc_norm,none": 0.518,
						"acc_norm_stderr,none": 0.0005003527054108269,
						"acc_stderr,none": 0.05115523112210208,
						"alias": "kobest",
						"f1,none": 0.4791948665252917,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.6274928774928775,
						"acc_stderr,none": 0.012907521446784632,
						"alias": " - kobest_boolq",
						"f1,none": 0.5779579332760488,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.015572363292015104,
						"alias": " - kobest_copa",
						"f1,none": 0.5873397435897436,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.43,
						"acc_norm,none": 0.518,
						"acc_norm_stderr,none": 0.02236856511738799,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.42600001945351906,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5365239294710328,
						"acc_stderr,none": 0.02505881982355679,
						"alias": " - kobest_sentineg",
						"f1,none": 0.4043705153294194,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604038,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7254997089074325,
						"acc_stderr,none": 0.01676361499326025,
						"alias": "lambada",
						"perplexity,none": 3.477999261370286,
						"perplexity_stderr,none": 0.16329169211004324
					},
					"lambada_cloze": {
						"acc,none": 0.08354356685425965,
						"acc_stderr,none": 0.017359537873426072,
						"alias": "lambada_cloze",
						"perplexity,none": 92.9621493847383,
						"perplexity_stderr,none": 4.749192287461508
					},
					"lambada_multilingual": {
						"acc,none": 0.5186493304870949,
						"acc_stderr,none": 0.09575650539776685,
						"alias": "lambada_multilingual",
						"perplexity,none": 27.056756454439657,
						"perplexity_stderr,none": 10.55570565115782
					},
					"lambada_openai": {
						"acc,none": 0.755288181641762,
						"acc_stderr,none": 0.005989573373070082,
						"alias": " - lambada_openai",
						"perplexity,none": 3.180029723439131,
						"perplexity_stderr,none": 0.05830830767398106
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04967979817581991,
						"acc_stderr,none": 0.0030271710751734893,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 84.91463890370277,
						"perplexity_stderr,none": 2.1929498791439825
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.4001552493692994,
						"acc_stderr,none": 0.006825677476606509,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 43.33029030872575,
						"perplexity_stderr,none": 2.393759617994182
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7562584901998836,
						"acc_stderr,none": 0.005981525423321768,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.1795871820811916,
						"perplexity_stderr,none": 0.05830848485872709
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.4267417038618281,
						"acc_stderr,none": 0.006890802308382406,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 36.28812043227467,
						"perplexity_stderr,none": 1.791789103370077
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5208616339996118,
						"acc_stderr,none": 0.00695991172085145,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 22.21756518405148,
						"perplexity_stderr,none": 1.1041376692704696
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.48922957500485154,
						"acc_stderr,none": 0.006964361334232535,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 30.268219165065197,
						"perplexity_stderr,none": 1.6670875958452993
					},
					"lambada_standard": {
						"acc,none": 0.694352804191733,
						"acc_stderr,none": 0.006418187162765869,
						"alias": " - lambada_standard",
						"perplexity,none": 3.776606949709117,
						"perplexity_stderr,none": 0.07308622187542482
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.1174073355326994,
						"acc_stderr,none": 0.004484766596365691,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 101.0096598657738,
						"perplexity_stderr,none": 2.813806288232997
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.494910941475827,
						"exact_match_stderr,get-answer": 0.012614191372690004
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.30261136712749614,
						"acc_norm_stderr,none": 0.018018696598158846,
						"acc_stderr,none": 0.01681567620647953,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.30216284987277353,
						"acc_norm,none": 0.30916030534351147,
						"acc_norm_stderr,none": 0.011659835223676902,
						"acc_stderr,none": 0.011585358690310618,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.35544388609715244,
						"acc_norm,none": 0.35845896147403683,
						"acc_norm_stderr,none": 0.008778747002389665,
						"acc_stderr,none": 0.008762266964873266,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.6929675916119467,
						"acc_stderr,none": 0.004747222342042236,
						"alias": "mc_taco",
						"f1,none": 0.5734883036633809,
						"f1_stderr,none": 0.007155483444682045
					},
					"medmcqa": {
						"acc,none": 0.4819507530480516,
						"acc_norm,none": 0.4819507530480516,
						"acc_norm_stderr,none": 0.007726714059604551,
						"acc_stderr,none": 0.007726714059604551,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.5106048703849175,
						"acc_norm,none": 0.5106048703849175,
						"acc_norm_stderr,none": 0.014016150183915747,
						"acc_stderr,none": 0.014016150183915747,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.596425010682239,
						"acc_stderr,none": 0.13844007438448744,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.5555555555555556,
						"acc_stderr,none": 0.042925967182569816,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.618421052631579,
						"acc_stderr,none": 0.03953173377749194,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.049604496374885836,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.6830188679245283,
						"acc_stderr,none": 0.0286372356398009,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.6805555555555556,
						"acc_stderr,none": 0.03899073687357336,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.04988876515698589,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.5895953757225434,
						"acc_stderr,none": 0.03750757044895537,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.04951218252396262,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.5319148936170213,
						"acc_stderr,none": 0.03261936918467382,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.42105263157894735,
						"acc_stderr,none": 0.046446020912223177,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.5724137931034483,
						"acc_stderr,none": 0.04122737111370332,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.3915343915343915,
						"acc_stderr,none": 0.025138091388851116,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04285714285714281,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.7387096774193549,
						"acc_stderr,none": 0.024993053397764822,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.49261083743842365,
						"acc_stderr,none": 0.035176035403610084,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.04760952285695237,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.7515151515151515,
						"acc_stderr,none": 0.033744026441394036,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.7373737373737373,
						"acc_stderr,none": 0.03135305009533084,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.844559585492228,
						"acc_stderr,none": 0.02614848346915332,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.5897435897435898,
						"acc_stderr,none": 0.02493931390694079,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.028897748741131133,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.634453781512605,
						"acc_stderr,none": 0.0312821770636846,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.304635761589404,
						"acc_stderr,none": 0.037579499229433426,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.7798165137614679,
						"acc_stderr,none": 0.01776597865232756,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.48148148148148145,
						"acc_stderr,none": 0.034076320938540516,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.7598039215686274,
						"acc_stderr,none": 0.02998373305591361,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.7805907172995781,
						"acc_stderr,none": 0.026939106581553945,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.6591928251121076,
						"acc_stderr,none": 0.0318114974705536,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.7557251908396947,
						"acc_stderr,none": 0.03768335959728745,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.5343251859723698,
						"acc_stderr,none": 0.16186401202777495,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.7520661157024794,
						"acc_stderr,none": 0.039418975265163025,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.7407407407407407,
						"acc_stderr,none": 0.04236511258094631,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.7607361963190185,
						"acc_stderr,none": 0.0335195387952127,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.047427623612430116,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.7864077669902912,
						"acc_stderr,none": 0.04058042015646035,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.8589743589743589,
						"acc_stderr,none": 0.02280138253459753,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.7956577266922095,
						"acc_stderr,none": 0.014419123980931904,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.6791907514450867,
						"acc_stderr,none": 0.0251310002336479,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24134078212290502,
						"acc_stderr,none": 0.014310999547961464,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.7091503267973857,
						"acc_stderr,none": 0.02600480036395213,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.6816865143224976,
						"acc_stderr,none": 0.09811125388055714,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.6881028938906752,
						"acc_stderr,none": 0.02631185807185416,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.7129629629629629,
						"acc_stderr,none": 0.025171041915309684,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.46099290780141844,
						"acc_stderr,none": 0.02973659252642444,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.44002607561929596,
						"acc_stderr,none": 0.012678037478574513,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.6801470588235294,
						"acc_stderr,none": 0.028332959514031232,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.6160130718954249,
						"acc_stderr,none": 0.01967580813528152,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.6636363636363637,
						"acc_stderr,none": 0.04525393596302506,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.7020408163265306,
						"acc_stderr,none": 0.029279567411065667,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.6958076048098798,
						"acc_stderr,none": 0.08956129050566648,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.8656716417910447,
						"acc_stderr,none": 0.024112678240900836,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.5080875356803045,
						"acc_stderr,none": 0.1215476495130753,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.5060240963855421,
						"acc_stderr,none": 0.03892212195333045,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.8128654970760234,
						"acc_stderr,none": 0.029913127232368032,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4542027508914926,
						"acc_stderr,none": 0.005025942602094432,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4631814483319772,
						"acc_stderr,none": 0.005029102510704409,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6568627450980392,
						"acc_stderr,none": 0.023532824020694156,
						"alias": "mrpc",
						"f1,none": 0.7426470588235294,
						"f1_stderr,none": 0.02115915180153455
					},
					"multimedqa": {
						"acc,none": 0.5325762952448545,
						"acc_norm,none": 0.4918553467234212,
						"acc_norm_stderr,none": 0.0001327699679693839,
						"acc_stderr,none": 0.06628005425577563,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5691006600660066,
						"acc_stderr,none": 0.007112887654223405,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7204665161775772,
						"mrr_stderr,none": 0.010218811328814581,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407435,
						"r@2,none": 0.42099322799097066,
						"r@2_stderr,none": 0.016596164895518038
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6669488337095556,
						"mrr_stderr,none": 0.010463015830979078,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.01473704740275095,
						"r@2,none": 0.44808126410835214,
						"r@2_stderr,none": 0.01671646047143711
					},
					"openbookqa": {
						"acc,none": 0.33,
						"acc_norm,none": 0.442,
						"acc_norm_stderr,none": 0.02223197069632112,
						"acc_stderr,none": 0.021049612166134803,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3825,
						"acc_stderr,none": 0.01086995643857379,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3145,
						"acc_stderr,none": 0.010385027655220813,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.346,
						"acc_stderr,none": 0.010639483037236658,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4905,
						"acc_stderr,none": 0.011181117282805228,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.011154111668060216,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.418,
						"acc_stderr,none": 0.01103172014804208,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.416,
						"acc_stderr,none": 0.011024190055654281,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4147857142857143,
						"acc_stderr,none": 0.057291271279150895,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.8052230685527747,
						"acc_norm,none": 0.8204570184983678,
						"acc_norm_stderr,none": 0.00895483432920114,
						"acc_stderr,none": 0.009240006693317723,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.3476729291204099,
						"acc_norm,none": 0.3074295473953886,
						"acc_norm_stderr,none": 0.0033711488878894512,
						"acc_stderr,none": 0.0034792952996372042,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.754,
						"acc_stderr,none": 0.019279819056352475,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7809920153879631,
						"acc_norm,none": 0.7148213731718109,
						"acc_norm_stderr,none": 0.0082806592848003,
						"acc_stderr,none": 0.15963097565490927,
						"alias": "pythia",
						"bits_per_byte,none": 0.7790910200090022,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.7160493234446637,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.180029723439131,
						"perplexity_stderr,none": 0.05830830767398106,
						"word_perplexity,none": 17.952971816297133,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4875886524822695,
						"acc_norm,none": 0.5372340425531915,
						"acc_norm_stderr,none": 0.07588716246805031,
						"acc_stderr,none": 0.04415178467483081,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5416666666666666,
						"acc_norm,none": 0.6833333333333333,
						"acc_norm_stderr,none": 0.04264263153554635,
						"acc_stderr,none": 0.04567549854280212,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.5,
						"acc_norm,none": 0.56875,
						"acc_norm_stderr,none": 0.03927594984018919,
						"acc_stderr,none": 0.03965257928590721,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.45774647887323944,
						"acc_norm,none": 0.45774647887323944,
						"acc_norm_stderr,none": 0.02961559611759778,
						"acc_stderr,none": 0.029615596117597787,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49789492952590153,
						"acc_stderr,none": 0.00676535059208955,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5387336136532278,
						"acc_stderr,none": 0.0024792278452134536,
						"alias": "qqp",
						"f1,none": 0.3456720816813445,
						"f1_stderr,none": 0.0036333557486569264
					},
					"race": {
						"acc,none": 0.40861244019138754,
						"acc_stderr,none": 0.015213937761630927,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170353,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.959,
						"acc_norm,none": 0.939,
						"acc_norm_stderr,none": 0.007572076091557429,
						"acc_stderr,none": 0.006273624021118743,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6750902527075813,
						"acc_stderr,none": 0.028190822551170353,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.6708715596330275,
						"acc_stderr,none": 0.01592184233279754,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5755773268019594,
						"acc_norm,none": 0.7741677496750975,
						"acc_norm_stderr,none": 0.0029562505640686877,
						"acc_stderr,none": 0.0034944742875050363,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.803766929553093,
						"acc_stderr,none": 0.08597393979979331,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.9355969551282052,
						"acc_stderr,none": 0.0024567845065233285,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8834498834498834,
						"acc_stderr,none": 0.0032305521742775297,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5976470588235294,
						"acc_stderr,none": 0.0048556479063216655,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.35372429452780935,
						"acc_stderr,none": 0.0015289998228574559,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.41370869033047736,
						"bleu_acc_stderr,none": 0.0172408618120998,
						"bleu_diff,none": -1.4792658300937298,
						"bleu_diff_stderr,none": 1.0365996246291096,
						"bleu_max,none": 30.506553066296135,
						"bleu_max_stderr,none": 0.8545048499620589,
						"rouge1_acc,none": 0.397796817625459,
						"rouge1_acc_stderr,none": 0.017133934248559652,
						"rouge1_diff,none": -1.916659079286129,
						"rouge1_diff_stderr,none": 1.2644440329641677,
						"rouge1_max,none": 55.752927992652076,
						"rouge1_max_stderr,none": 0.9304517781051922,
						"rouge2_acc,none": 0.3537331701346389,
						"rouge2_acc_stderr,none": 0.016737814358846147,
						"rouge2_diff,none": -2.594294632926603,
						"rouge2_diff_stderr,none": 1.4182491490828897,
						"rouge2_max,none": 40.96291082037781,
						"rouge2_max_stderr,none": 1.1007195429525476,
						"rougeL_acc,none": 0.40269277845777235,
						"rougeL_acc_stderr,none": 0.017168830935187215,
						"rougeL_diff,none": -2.232349573503825,
						"rougeL_diff_stderr,none": 1.2801381361084667,
						"rougeL_max,none": 53.07319351290984,
						"rougeL_max_stderr,none": 0.9465416042598798
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.41370869033047736,
						"bleu_acc_stderr,none": 0.0172408618120998,
						"bleu_diff,none": -1.4792658300937298,
						"bleu_diff_stderr,none": 1.0365996246291096,
						"bleu_max,none": 30.506553066296135,
						"bleu_max_stderr,none": 0.8545048499620589,
						"rouge1_acc,none": 0.397796817625459,
						"rouge1_acc_stderr,none": 0.017133934248559652,
						"rouge1_diff,none": -1.916659079286129,
						"rouge1_diff_stderr,none": 1.2644440329641677,
						"rouge1_max,none": 55.752927992652076,
						"rouge1_max_stderr,none": 0.9304517781051922,
						"rouge2_acc,none": 0.3537331701346389,
						"rouge2_acc_stderr,none": 0.016737814358846147,
						"rouge2_diff,none": -2.594294632926603,
						"rouge2_diff_stderr,none": 1.4182491490828897,
						"rouge2_max,none": 40.96291082037781,
						"rouge2_max_stderr,none": 1.1007195429525476,
						"rougeL_acc,none": 0.40269277845777235,
						"rougeL_acc_stderr,none": 0.017168830935187215,
						"rougeL_diff,none": -2.232349573503825,
						"rougeL_diff_stderr,none": 1.2801381361084667,
						"rougeL_max,none": 53.07319351290984,
						"rougeL_max_stderr,none": 0.9465416042598798
					},
					"truthfulqa_mc1": {
						"acc,none": 0.28151774785801714,
						"acc_stderr,none": 0.01574402724825605,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4259308411976015,
						"acc_stderr,none": 0.01420956064029871,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.15403543307086615,
						"exact_match_stderr,none": 0.008009980186286517
					},
					"wic": {
						"acc,none": 0.5783699059561128,
						"acc_stderr,none": 0.019565859392130985,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7790910200090022,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.7160493234446637,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 17.952971816297133,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.7363851617995264,
						"acc_stderr,none": 0.012382849299658464,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5774647887323944,
						"acc_stderr,none": 0.05903984205682581,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.40384615384615385,
						"acc_stderr,none": 0.04834688952654018,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8901098901098901,
						"acc_stderr,none": 0.018963420053918545,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5594545454545454,
						"acc_stderr,none": 0.05387910421610255,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.022371610982580396,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928866,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.66,
						"acc_stderr,none": 0.021206117013673066,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.022371610982580396,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.022288147591176945,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740862,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.05866526647573086,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.4570281124497992,
						"acc_stderr,none": 0.009984991084561275,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4971887550200803,
						"acc_stderr,none": 0.010021914455122174,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41124497991967873,
						"acc_stderr,none": 0.00986291222354463,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5662650602409639,
						"acc_stderr,none": 0.009933667945702098,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.457429718875502,
						"acc_stderr,none": 0.00998568222022746,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.5088353413654618,
						"acc_stderr,none": 0.010020508033762627,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43012048192771085,
						"acc_stderr,none": 0.00992371167540806,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4975903614457831,
						"acc_stderr,none": 0.010021956483068082,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.35542168674698793,
						"acc_stderr,none": 0.009593947957927139,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3887550200803213,
						"acc_stderr,none": 0.009770869423441493,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.43092369477911646,
						"acc_stderr,none": 0.009925970741520641,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.00948525020851688,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480236,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3755020080321285,
						"acc_stderr,none": 0.009706422844379824,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5916611515552614,
						"acc_stderr,none": 0.07661315123253779,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5307743216412971,
						"acc_stderr,none": 0.012842730340585787,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7882197220383852,
						"acc_stderr,none": 0.010514241109625348,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6902713434811383,
						"acc_stderr,none": 0.011899045981288764,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5115817339510258,
						"acc_stderr,none": 0.012863672949335879,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5526141628060887,
						"acc_stderr,none": 0.012795688167385287,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570946,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4870946393117141,
						"acc_stderr,none": 0.012862838605728477,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6657842488418266,
						"acc_stderr,none": 0.012139246810918228,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.514228987425546,
						"acc_stderr,none": 0.01286191399959613,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5387160820648577,
						"acc_stderr,none": 0.012828493353271539,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6333553937789543,
						"acc_stderr,none": 0.012401034429990705,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8120926050797932,
						"acc_stderr,none": 0.04148122639462828,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8851612903225806,
						"acc_stderr,none": 0.006613590439489932,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7469879518072289,
						"acc_stderr,none": 0.04800875830437279,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7194994786235662,
						"acc_stderr,none": 0.014514407890552966,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7642585551330798,
						"acc_stderr,none": 0.026223308206222554,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.026198057744026396,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7658730158730159,
						"acc_stderr,none": 0.018880788485078296,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "mistralai/Mistral-7B-v0.1"
	},
	"mosaicml/mpt-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.632750845546787,
						"acc_norm,none": 0.6054114994363021,
						"acc_norm_stderr,none": 0.0456764314880605,
						"acc_stderr,none": 0.05584678295546728,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3403125,
						"acc_stderr,none": 0.01484116484398752,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.05615,
						"acc_stderr,none": 0.06998942764954397,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.834044776119403,
						"acc_stderr,none": 0.15059206831641733,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.262258543833581,
						"acc_norm,none": 0.262258543833581,
						"acc_norm_stderr,none": 0.12146724655023557,
						"acc_stderr,none": 0.12146724655023557,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2685201174235884,
						"acc_norm,none": 0.2685201174235884,
						"acc_norm_stderr,none": 0.042112671024529806,
						"acc_stderr,none": 0.042112671024529806,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5930046213476445,
						"likelihood_diff_stderr,none": 0.4485120786198167,
						"pct_stereotype,none": 0.5861657722122839,
						"pct_stereotype_stderr,none": 0.08618208256220111
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05511811023622047,
						"exact_match_stderr,none": 0.0050638514892882985
					},
					"glue": {
						"acc,none": 0.4873570944877401,
						"acc_stderr,none": 0.0647330299966228,
						"alias": "glue",
						"f1,none": 0.37734063591955397,
						"f1_stderr,none": 0.001383566438455718,
						"mcc,none": 0.002047167688916013,
						"mcc_stderr,none": 0.000961027119918923
					},
					"kmmlu": {
						"acc,none": 0.16162287034363268,
						"acc_norm,none": 0.16162287034363268,
						"acc_norm_stderr,none": 0.03761631346537425,
						"acc_stderr,none": 0.03761631346537425,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4972593729445297,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.00049943086172345,
						"acc_stderr,none": 0.03603153727459309,
						"alias": "kobest",
						"f1,none": 0.41132274129467367,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6517562584901999,
						"acc_stderr,none": 0.016834707882291924,
						"alias": "lambada",
						"perplexity,none": 4.399679001639323,
						"perplexity_stderr,none": 0.27949501209155936
					},
					"lambada_cloze": {
						"acc,none": 0.2821657287017271,
						"acc_stderr,none": 0.0579568846515385,
						"alias": "lambada_cloze",
						"perplexity,none": 67.58735109794539,
						"perplexity_stderr,none": 20.635442097454224
					},
					"lambada_multilingual": {
						"acc,none": 0.44366388511546667,
						"acc_stderr,none": 0.08811220105550514,
						"alias": "lambada_multilingual",
						"perplexity,none": 49.825227199829776,
						"perplexity_stderr,none": 17.660399867678677
					},
					"mmlu": {
						"acc,none": 0.2913402649195272,
						"acc_stderr,none": 0.047641057901318375,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2871413390010627,
						"acc_stderr,none": 0.04377949041651031,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.32281943997425167,
						"acc_stderr,none": 0.047811192595901685,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29314267143321415,
						"acc_stderr,none": 0.03440263964438806,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2648271487472249,
						"acc_stderr,none": 0.05376304865956802,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3227821149751597,
						"acc_norm,none": 0.28858990000342066,
						"acc_norm_stderr,none": 9.102416685018394e-05,
						"acc_stderr,none": 0.11416116085430851,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4352857142857143,
						"acc_stderr,none": 0.0615743455578487,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7318073377630966,
						"acc_norm,none": 0.6103462487265284,
						"acc_norm_stderr,none": 0.004492865877393056,
						"acc_stderr,none": 0.1426577222241238,
						"alias": "pythia",
						"bits_per_byte,none": 0.6201211884072789,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5370042863816313,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.877221147872283,
						"perplexity_stderr,none": 0.08122881240478526,
						"word_perplexity,none": 9.959459472125278,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.48226950354609927,
						"acc_norm_stderr,none": 0.053804311014196526,
						"acc_stderr,none": 0.041826155148564415,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5750557385777512,
						"acc_stderr,none": 0.03710239568682007,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2708315899837244,
						"acc_stderr,none": 0.0011717067589892365,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -10.064418516236938,
						"bleu_diff_stderr,none": 0.848497644094101,
						"bleu_max,none": 24.678819878959306,
						"bleu_max_stderr,none": 0.7719089135650388,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.01546102762725359,
						"rouge1_diff,none": -12.27036240470745,
						"rouge1_diff_stderr,none": 0.9300030207544127,
						"rouge1_max,none": 48.689929876362534,
						"rouge1_max_stderr,none": 0.8981865263191734,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728327,
						"rouge2_diff,none": -14.882571896093566,
						"rouge2_diff_stderr,none": 1.098599472827099,
						"rouge2_max,none": 32.38876809163368,
						"rouge2_max_stderr,none": 1.0196427561268149,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237012,
						"rougeL_diff,none": -12.754023969579247,
						"rougeL_diff_stderr,none": 0.938647677805448,
						"rougeL_max,none": 46.01510435568951,
						"rougeL_max_stderr,none": 0.9047163127400675
					},
					"xcopa": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.04233919229869232,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.39836680053547524,
						"acc_stderr,none": 0.05310877280620812,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5627820227423138,
						"acc_stderr,none": 0.07635437818393377,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7691616093504158,
						"acc_stderr,none": 0.05177053504539272,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.632750845546787,
						"acc_norm,none": 0.6054114994363021,
						"acc_norm_stderr,none": 0.0456764314880605,
						"acc_stderr,none": 0.05584678295546728,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3403125,
						"acc_stderr,none": 0.01484116484398752,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.01493311749093257,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224482,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3466666666666667,
						"acc_stderr,none": 0.013744022550571956,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3967576791808874,
						"acc_norm,none": 0.41467576791808874,
						"acc_norm_stderr,none": 0.014397070564409174,
						"acc_stderr,none": 0.014296513020180635,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7491582491582491,
						"acc_norm,none": 0.6994949494949495,
						"acc_norm_stderr,none": 0.009407763090599318,
						"acc_stderr,none": 0.00889518301048739,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.05615,
						"acc_stderr,none": 0.06998942764954397,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0185,
						"acc_stderr,none": 0.0030138707185866534,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.2015,
						"acc_stderr,none": 0.00897157285874561,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0485,
						"acc_stderr,none": 0.004804728682127106,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.2285,
						"acc_stderr,none": 0.009390844955832967,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0445,
						"acc_stderr,none": 0.004611996341621297,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.017,
						"acc_stderr,none": 0.002891311093590553,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521504,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.000500000000000013,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339509,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006941431670281995,
						"acc_stderr,none": 0.001729699741707206,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.834044776119403,
						"acc_stderr,none": 0.15059206831641733,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524305,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426557,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366648,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.872,
						"acc_stderr,none": 0.01057013376110866,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.013314551335935959,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855733,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.0049395748196984475,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.0071508835212954446,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792942,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866444,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704164,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727066,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.008230354715244052,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731979,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571409,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890141,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045083,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.325,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400229,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103752,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.014806864733738863,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731965,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142649,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333315,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315151,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.00900889339265154,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.015814743314581818,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.00949157995752505,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.01347358666196723,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.632,
						"acc_stderr,none": 0.0152580735615218,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535265,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897893,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.855,
						"acc_stderr,none": 0.011139977517890132,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996702,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.00944924802766274,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.01295371756673723,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633706,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613614,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.01566350361015528,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.00619987406633707,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340995,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.0032999833166078153,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096909,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.491,
						"acc_stderr,none": 0.015816736995005392,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528002,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400224,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.684,
						"acc_stderr,none": 0.014709193056057121,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.0109121526325044,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973421,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.817,
						"acc_stderr,none": 0.012233587399477828,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584936,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.0057338361396954704,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055235,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.318,
						"acc_stderr,none": 0.014734079309311901,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7363914373088685,
						"acc_stderr,none": 0.00770595841908305,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.4107142857142857,
						"acc_stderr,none": 0.0663363415035954,
						"alias": "cb",
						"f1,none": 0.1940928270042194,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.262258543833581,
						"acc_norm,none": 0.262258543833581,
						"acc_norm_stderr,none": 0.12146724655023557,
						"acc_stderr,none": 0.12146724655023557,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966264,
						"acc_stderr,none": 0.06520506636966264,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.45454545454545453,
						"acc_norm,none": 0.45454545454545453,
						"acc_norm_stderr,none": 0.08802234877744129,
						"acc_stderr,none": 0.08802234877744129,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122594,
						"acc_stderr,none": 0.08503766788122594,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.06288639360110458,
						"acc_stderr,none": 0.06288639360110458,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.0795654132101608,
						"acc_stderr,none": 0.0795654132101608,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.1891891891891892,
						"acc_norm,none": 0.1891891891891892,
						"acc_norm_stderr,none": 0.06527647182968215,
						"acc_stderr,none": 0.06527647182968215,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595455,
						"acc_stderr,none": 0.08534681648595455,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.55,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.11413288653790232,
						"acc_stderr,none": 0.11413288653790232,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002614,
						"acc_stderr,none": 0.07770873402002614,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215394,
						"acc_stderr,none": 0.07180198468215394,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.08191780219091252,
						"acc_stderr,none": 0.08191780219091252,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.0914486154730632,
						"acc_stderr,none": 0.0914486154730632,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434487,
						"acc_stderr,none": 0.07233518641434487,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.4482758620689655,
						"acc_norm,none": 0.4482758620689655,
						"acc_norm_stderr,none": 0.09398415777506855,
						"acc_stderr,none": 0.09398415777506855,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.07102933373079214,
						"acc_stderr,none": 0.07102933373079214,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.056503155622080935,
						"acc_stderr,none": 0.056503155622080935,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2685201174235884,
						"acc_norm,none": 0.2685201174235884,
						"acc_norm_stderr,none": 0.042112671024529806,
						"acc_stderr,none": 0.042112671024529806,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2781065088757396,
						"acc_norm,none": 0.2781065088757396,
						"acc_norm_stderr,none": 0.03456905430376244,
						"acc_stderr,none": 0.03456905430376244,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.23648648648648649,
						"acc_norm,none": 0.23648648648648649,
						"acc_norm_stderr,none": 0.03504716241250436,
						"acc_stderr,none": 0.03504716241250436,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.3090909090909091,
						"acc_norm,none": 0.3090909090909091,
						"acc_norm_stderr,none": 0.03608541011573967,
						"acc_stderr,none": 0.03608541011573967,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.31100478468899523,
						"acc_norm,none": 0.31100478468899523,
						"acc_norm_stderr,none": 0.032096669533489795,
						"acc_stderr,none": 0.032096669533489795,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467766,
						"acc_stderr,none": 0.03980066246467766,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.042692919157281094,
						"acc_stderr,none": 0.042692919157281094,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.02425409025245806,
						"acc_stderr,none": 0.02425409025245806,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24022346368715083,
						"acc_norm,none": 0.24022346368715083,
						"acc_norm_stderr,none": 0.032021424638044936,
						"acc_stderr,none": 0.032021424638044936,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2742616033755274,
						"acc_norm,none": 0.2742616033755274,
						"acc_norm_stderr,none": 0.029041333510598035,
						"acc_stderr,none": 0.029041333510598035,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566668,
						"acc_stderr,none": 0.04317273776566668,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153371,
						"acc_stderr,none": 0.040842473153371,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.043300437496507416,
						"acc_stderr,none": 0.043300437496507416,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199628,
						"acc_stderr,none": 0.04439263906199628,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.28431372549019607,
						"acc_norm,none": 0.28431372549019607,
						"acc_norm_stderr,none": 0.031660096793998116,
						"acc_stderr,none": 0.031660096793998116,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.3197278911564626,
						"acc_norm,none": 0.3197278911564626,
						"acc_norm_stderr,none": 0.03859714365657015,
						"acc_stderr,none": 0.03859714365657015,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.28776978417266186,
						"acc_norm,none": 0.28776978417266186,
						"acc_norm_stderr,none": 0.03853836179233389,
						"acc_stderr,none": 0.03853836179233389,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2578616352201258,
						"acc_norm,none": 0.2578616352201258,
						"acc_norm_stderr,none": 0.03480224533547635,
						"acc_stderr,none": 0.03480224533547635,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2883435582822086,
						"acc_norm,none": 0.2883435582822086,
						"acc_norm_stderr,none": 0.03559039531617342,
						"acc_stderr,none": 0.03559039531617342,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761063,
						"acc_stderr,none": 0.03336605189761063,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.028271399816988542,
						"acc_stderr,none": 0.028271399816988542,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03053289223393203,
						"acc_stderr,none": 0.03053289223393203,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.25210084033613445,
						"acc_norm,none": 0.25210084033613445,
						"acc_norm_stderr,none": 0.028205545033277733,
						"acc_stderr,none": 0.028205545033277733,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.029017133559381274,
						"acc_stderr,none": 0.029017133559381274,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.28888888888888886,
						"acc_norm,none": 0.28888888888888886,
						"acc_norm_stderr,none": 0.0391545063041425,
						"acc_stderr,none": 0.0391545063041425,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900236,
						"acc_stderr,none": 0.03675137438900236,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.26136363636363635,
						"acc_norm,none": 0.26136363636363635,
						"acc_norm_stderr,none": 0.03321382551635589,
						"acc_stderr,none": 0.03321382551635589,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594346,
						"acc_stderr,none": 0.03518627932594346,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.23728813559322035,
						"acc_norm,none": 0.23728813559322035,
						"acc_norm_stderr,none": 0.03933012549934383,
						"acc_stderr,none": 0.03933012549934383,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.034450002891734596,
						"acc_stderr,none": 0.034450002891734596,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721377,
						"acc_stderr,none": 0.04069306319721377,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.17482517482517482,
						"acc_norm,none": 0.17482517482517482,
						"acc_norm_stderr,none": 0.03187357652966491,
						"acc_stderr,none": 0.03187357652966491,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2864864864864865,
						"acc_norm,none": 0.2864864864864865,
						"acc_norm_stderr,none": 0.03333068663336699,
						"acc_stderr,none": 0.03333068663336699,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.29069767441860467,
						"acc_norm,none": 0.29069767441860467,
						"acc_norm_stderr,none": 0.03472469304477598,
						"acc_stderr,none": 0.03472469304477598,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26034063260340634,
						"acc_norm,none": 0.26034063260340634,
						"acc_norm_stderr,none": 0.021671797319809193,
						"acc_stderr,none": 0.021671797319809193,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3317757009345794,
						"acc_norm,none": 0.3317757009345794,
						"acc_norm_stderr,none": 0.03226217317322115,
						"acc_stderr,none": 0.03226217317322115,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3114754098360656,
						"acc_norm,none": 0.3114754098360656,
						"acc_norm_stderr,none": 0.0420996926731014,
						"acc_stderr,none": 0.0420996926731014,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.030588764516074875,
						"acc_stderr,none": 0.030588764516074875,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2722222222222222,
						"acc_norm,none": 0.2722222222222222,
						"acc_norm_stderr,none": 0.03326861086666926,
						"acc_stderr,none": 0.03326861086666926,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03294754314388876,
						"acc_stderr,none": 0.03294754314388876,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3017241379310345,
						"acc_norm,none": 0.3017241379310345,
						"acc_norm_stderr,none": 0.042802547925054606,
						"acc_stderr,none": 0.042802547925054606,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378947,
						"acc_stderr,none": 0.03855289616378947,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.041764667586049006,
						"acc_stderr,none": 0.041764667586049006,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24571428571428572,
						"acc_norm,none": 0.24571428571428572,
						"acc_norm_stderr,none": 0.03263687142627841,
						"acc_stderr,none": 0.03263687142627841,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2938388625592417,
						"acc_norm,none": 0.2938388625592417,
						"acc_norm_stderr,none": 0.03143379932562226,
						"acc_stderr,none": 0.03143379932562226,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.28191489361702127,
						"acc_norm,none": 0.28191489361702127,
						"acc_norm_stderr,none": 0.023234393263661213,
						"acc_stderr,none": 0.023234393263661213,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171135,
						"acc_stderr,none": 0.028324514684171135,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.03398079939585585,
						"acc_stderr,none": 0.03398079939585585,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.28761061946902655,
						"acc_norm,none": 0.28761061946902655,
						"acc_norm_stderr,none": 0.030176573035509174,
						"acc_stderr,none": 0.030176573035509174,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.035014387062967806,
						"acc_stderr,none": 0.035014387062967806,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.31952662721893493,
						"acc_norm,none": 0.31952662721893493,
						"acc_norm_stderr,none": 0.03597530251676528,
						"acc_stderr,none": 0.03597530251676528,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3416149068322981,
						"acc_norm,none": 0.3416149068322981,
						"acc_norm_stderr,none": 0.03749284617282493,
						"acc_stderr,none": 0.03749284617282493,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.002047167688916013,
						"mcc_stderr,none": 0.031000437414961148
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5930046213476445,
						"likelihood_diff_stderr,none": 0.4485120786198167,
						"pct_stereotype,none": 0.5861657722122839,
						"pct_stereotype_stderr,none": 0.08618208256220111
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.599880739415623,
						"likelihood_diff_stderr,none": 0.08457860716023233,
						"pct_stereotype,none": 0.6589147286821705,
						"pct_stereotype_stderr,none": 0.011580013978908416
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.15521978021978,
						"likelihood_diff_stderr,none": 0.3794271669810581,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497395
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.090909090909091,
						"likelihood_diff_stderr,none": 1.7304993785533571,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.096153846153846,
						"likelihood_diff_stderr,none": 0.6415522676525877,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.66328125,
						"likelihood_diff_stderr,none": 0.16880223285108623,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.02681271031002423
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.4768518518518516,
						"likelihood_diff_stderr,none": 0.21254827993159336,
						"pct_stereotype,none": 0.6342592592592593,
						"pct_stereotype_stderr,none": 0.032847388576472056
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8350694444444446,
						"likelihood_diff_stderr,none": 0.3420481451748029,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.338582677165354,
						"likelihood_diff_stderr,none": 0.14134995695321553,
						"pct_stereotype,none": 0.5866141732283464,
						"pct_stereotype_stderr,none": 0.021870065687317718
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7466216216216215,
						"likelihood_diff_stderr,none": 0.34538188316369367,
						"pct_stereotype,none": 0.7567567567567568,
						"pct_stereotype_stderr,none": 0.040907430738609196
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.743279569892473,
						"likelihood_diff_stderr,none": 0.4524424553476681,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.03495073154102977
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.0697368421052635,
						"likelihood_diff_stderr,none": 0.2337138701978675,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.5859421586165774,
						"likelihood_diff_stderr,none": 0.08612605669290072,
						"pct_stereotype,none": 0.5116279069767442,
						"pct_stereotype_stderr,none": 0.012209996095069644
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.6194444444444445,
						"likelihood_diff_stderr,none": 0.352696669040604,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.05267171812666418
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.826923076923077,
						"likelihood_diff_stderr,none": 0.9546548474249905,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.90530303030303,
						"likelihood_diff_stderr,none": 0.43921832556828744,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.05966637484671758
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.217289719626168,
						"likelihood_diff_stderr,none": 0.18021529078432102,
						"pct_stereotype,none": 0.514018691588785,
						"pct_stereotype_stderr,none": 0.02793986154930238
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.9239130434782608,
						"likelihood_diff_stderr,none": 0.22250867409139075,
						"pct_stereotype,none": 0.35968379446640314,
						"pct_stereotype_stderr,none": 0.030231340989680604
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.2899305555555554,
						"likelihood_diff_stderr,none": 0.4380573410947552,
						"pct_stereotype,none": 0.625,
						"pct_stereotype_stderr,none": 0.05745481997211521
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.143478260869565,
						"likelihood_diff_stderr,none": 0.16477730507275248,
						"pct_stereotype,none": 0.4260869565217391,
						"pct_stereotype_stderr,none": 0.0230815954374589
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.476086956521739,
						"likelihood_diff_stderr,none": 0.32825518657847597,
						"pct_stereotype,none": 0.6260869565217392,
						"pct_stereotype_stderr,none": 0.04531585828644964
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.78021978021978,
						"likelihood_diff_stderr,none": 0.3904936288706526,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.04441155916843277
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.009566326530612,
						"likelihood_diff_stderr,none": 0.2629401041709253,
						"pct_stereotype,none": 0.6683673469387755,
						"pct_stereotype_stderr,none": 0.03371467279183503
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.05511811023622047,
						"exact_match_stderr,none": 0.0050638514892882985
					},
					"glue": {
						"acc,none": 0.4873570944877401,
						"acc_stderr,none": 0.0647330299966228,
						"alias": "glue",
						"f1,none": 0.37734063591955397,
						"f1_stderr,none": 0.001383566438455718,
						"mcc,none": 0.002047167688916013,
						"mcc_stderr,none": 0.000961027119918923
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.06823351023502654,
						"exact_match_stderr,get-answer": 0.006945358944067431
					},
					"hellaswag": {
						"acc,none": 0.5722963553077076,
						"acc_norm,none": 0.7626966739693288,
						"acc_norm_stderr,none": 0.004245602744443551,
						"acc_stderr,none": 0.004937345081868093,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.16162287034363268,
						"acc_norm,none": 0.16162287034363268,
						"acc_norm_stderr,none": 0.03761631346537425,
						"acc_stderr,none": 0.03761631346537425,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.03684529491774711,
						"acc_stderr,none": 0.03684529491774711,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.011297239823409308,
						"acc_stderr,none": 0.011297239823409308,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.148,
						"acc_norm,none": 0.148,
						"acc_norm_stderr,none": 0.011234866364235261,
						"acc_stderr,none": 0.011234866364235261,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024393,
						"acc_stderr,none": 0.012749374359024393,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.223,
						"acc_norm,none": 0.223,
						"acc_norm_stderr,none": 0.013169830843425673,
						"acc_stderr,none": 0.013169830843425673,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.22166666666666668,
						"acc_norm,none": 0.22166666666666668,
						"acc_norm_stderr,none": 0.016971475408908445,
						"acc_stderr,none": 0.016971475408908445,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.010674874844837956,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.009859828407037193,
						"acc_stderr,none": 0.009859828407037193,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.146,
						"acc_norm,none": 0.146,
						"acc_norm_stderr,none": 0.011171786285496497,
						"acc_stderr,none": 0.011171786285496497,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.029832025555495228,
						"acc_stderr,none": 0.029832025555495228,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.00982000165134571,
						"acc_stderr,none": 0.00982000165134571,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.04063619567656726,
						"acc_stderr,none": 0.04063619567656726,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.009859828407037181,
						"acc_stderr,none": 0.009859828407037181,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.14,
						"acc_norm,none": 0.14,
						"acc_norm_stderr,none": 0.010978183844357798,
						"acc_stderr,none": 0.010978183844357798,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595282,
						"acc_stderr,none": 0.013063179040595282,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.149,
						"acc_norm,none": 0.149,
						"acc_norm_stderr,none": 0.011266140684632163,
						"acc_stderr,none": 0.011266140684632163,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.012021627157731982,
						"acc_stderr,none": 0.012021627157731982,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.158,
						"acc_norm,none": 0.158,
						"acc_norm_stderr,none": 0.01153989467755957,
						"acc_stderr,none": 0.01153989467755957,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.139,
						"acc_norm,none": 0.139,
						"acc_norm_stderr,none": 0.01094526376104296,
						"acc_stderr,none": 0.01094526376104296,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264616,
						"acc_stderr,none": 0.011912216456264616,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.112,
						"acc_norm,none": 0.112,
						"acc_norm_stderr,none": 0.009977753031397224,
						"acc_stderr,none": 0.009977753031397224,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.010845350230472986,
						"acc_stderr,none": 0.010845350230472986,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.009859828407037186,
						"acc_stderr,none": 0.009859828407037186,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661773,
						"acc_stderr,none": 0.013106173040661773,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.119,
						"acc_norm,none": 0.119,
						"acc_norm_stderr,none": 0.010244215145336662,
						"acc_stderr,none": 0.010244215145336662,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.203,
						"acc_norm,none": 0.203,
						"acc_norm_stderr,none": 0.012726073744598268,
						"acc_stderr,none": 0.012726073744598268,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.17666666666666667,
						"acc_norm,none": 0.17666666666666667,
						"acc_norm_stderr,none": 0.015583024214361182,
						"acc_stderr,none": 0.015583024214361182,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.011297239823409308,
						"acc_stderr,none": 0.011297239823409308,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.01135891830347529,
						"acc_stderr,none": 0.01135891830347529,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.162,
						"acc_norm,none": 0.162,
						"acc_norm_stderr,none": 0.011657267771304405,
						"acc_stderr,none": 0.011657267771304405,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.01110798754893915,
						"acc_stderr,none": 0.01110798754893915,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.024698855131686858,
						"acc_stderr,none": 0.024698855131686858,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.01312750285969625,
						"acc_stderr,none": 0.01312750285969625,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.123,
						"acc_norm,none": 0.123,
						"acc_norm_stderr,none": 0.01039129342184988,
						"acc_stderr,none": 0.01039129342184988,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644601,
						"acc_stderr,none": 0.011800434324644601,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.02780947382046009,
						"acc_stderr,none": 0.02780947382046009,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499335,
						"acc_stderr,none": 0.012864077288499335,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074128,
						"acc_stderr,none": 0.012336254828074128,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.028617649261360185,
						"acc_stderr,none": 0.028617649261360185,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.142,
						"acc_norm,none": 0.142,
						"acc_norm_stderr,none": 0.011043457699378248,
						"acc_stderr,none": 0.011043457699378248,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4972593729445297,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.00049943086172345,
						"acc_stderr,none": 0.03603153727459309,
						"alias": "kobest",
						"f1,none": 0.41132274129467367,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.013339608823275215,
						"alias": " - kobest_boolq",
						"f1,none": 0.3892116609820452,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.527,
						"acc_stderr,none": 0.015796218551302622,
						"alias": " - kobest_copa",
						"f1,none": 0.5262035481644645,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.378,
						"acc_norm,none": 0.472,
						"acc_norm_stderr,none": 0.022347949832668097,
						"acc_stderr,none": 0.021706550824518177,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.37323575096906003,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5264483627204031,
						"acc_stderr,none": 0.025090768761517872,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5125653082549635,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6517562584901999,
						"acc_stderr,none": 0.016834707882291924,
						"alias": "lambada",
						"perplexity,none": 4.399679001639323,
						"perplexity_stderr,none": 0.27949501209155936
					},
					"lambada_cloze": {
						"acc,none": 0.2821657287017271,
						"acc_stderr,none": 0.0579568846515385,
						"alias": "lambada_cloze",
						"perplexity,none": 67.58735109794539,
						"perplexity_stderr,none": 20.635442097454224
					},
					"lambada_multilingual": {
						"acc,none": 0.44366388511546667,
						"acc_stderr,none": 0.08811220105550514,
						"alias": "lambada_multilingual",
						"perplexity,none": 49.825227199829776,
						"perplexity_stderr,none": 17.660399867678677
					},
					"lambada_openai": {
						"acc,none": 0.687172520861634,
						"acc_stderr,none": 0.006459477837059417,
						"alias": " - lambada_openai",
						"perplexity,none": 3.877221147872283,
						"perplexity_stderr,none": 0.08122881240478526
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.3974383854065593,
						"acc_stderr,none": 0.0068178541871044464,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 26.549393918870507,
						"perplexity_stderr,none": 0.7034674026247055
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3380555016495245,
						"acc_stderr,none": 0.0065904772529743,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 69.90825487388426,
						"perplexity_stderr,none": 4.041499841736717
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6846497186105182,
						"acc_stderr,none": 0.006473555880726443,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.8745064239924956,
						"perplexity_stderr,none": 0.08114750562939017
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.36910537550941197,
						"acc_stderr,none": 0.00672303963206429,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 67.18030832861038,
						"perplexity_stderr,none": 3.68108116491444
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.43333980205705414,
						"acc_stderr,none": 0.006903792306860549,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 42.760358147168645,
						"perplexity_stderr,none": 2.3632160727085854
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.39316902775082474,
						"acc_stderr,none": 0.006805116923096301,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 65.40270822549307,
						"perplexity_stderr,none": 3.8443886816295008
					},
					"lambada_standard": {
						"acc,none": 0.6208034154861246,
						"acc_stderr,none": 0.006759605180095811,
						"alias": " - lambada_standard",
						"perplexity,none": 4.92501612130002,
						"perplexity_stderr,none": 0.10793923623741004
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.16689307199689501,
						"acc_stderr,none": 0.005194952730632863,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 108.62530827702027,
						"perplexity_stderr,none": 3.0017193342382287
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.22519083969465647,
						"exact_match_stderr,get-answer": 0.010538641739267853
					},
					"logiqa": {
						"acc,none": 0.23195084485407066,
						"acc_norm,none": 0.271889400921659,
						"acc_norm_stderr,none": 0.017451716009436825,
						"acc_stderr,none": 0.0165552524979259,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.25254452926208654,
						"acc_norm,none": 0.2767175572519084,
						"acc_norm_stderr,none": 0.011287148180222278,
						"acc_stderr,none": 0.010961589961715609,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.2609715242881072,
						"acc_norm_stderr,none": 0.008039475906726762,
						"acc_stderr,none": 0.008029434758777935,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5314552001694556,
						"acc_stderr,none": 0.005135702909925417,
						"alias": "mc_taco",
						"f1,none": 0.4904399907855333,
						"f1_stderr,none": 0.006607822049350872
					},
					"medmcqa": {
						"acc,none": 0.29261295720774566,
						"acc_norm,none": 0.29261295720774566,
						"acc_norm_stderr,none": 0.007035311198106624,
						"acc_stderr,none": 0.007035311198106624,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27965435978004716,
						"acc_norm,none": 0.27965435978004716,
						"acc_norm_stderr,none": 0.012584550489971286,
						"acc_stderr,none": 0.012584550489971286,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2913402649195272,
						"acc_stderr,none": 0.047641057901318375,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2518518518518518,
						"acc_stderr,none": 0.037498507091740206,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.2894736842105263,
						"acc_stderr,none": 0.036906779861372814,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.048783173121456316,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3169811320754717,
						"acc_stderr,none": 0.028637235639800935,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.12,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.30057803468208094,
						"acc_stderr,none": 0.0349610148119118,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.04336432707993178,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.04943110704237102,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3191489361702128,
						"acc_stderr,none": 0.030472973363380045,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436716,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438013,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2857142857142857,
						"acc_stderr,none": 0.023266512213730554,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848877,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.267741935483871,
						"acc_stderr,none": 0.025189006660212385,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.0347769116216366,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.25757575757575757,
						"acc_stderr,none": 0.03115626951964684,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.31088082901554404,
						"acc_stderr,none": 0.03340361906276587,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.022421273612923714,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.21851851851851853,
						"acc_stderr,none": 0.025195752251823796,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31092436974789917,
						"acc_stderr,none": 0.030066761582977927,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23178807947019867,
						"acc_stderr,none": 0.03445406271987053,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.28073394495412846,
						"acc_stderr,none": 0.019266055045871623,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093926,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.29411764705882354,
						"acc_stderr,none": 0.03198001660115071,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3080168776371308,
						"acc_stderr,none": 0.030052389335605702,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.40358744394618834,
						"acc_stderr,none": 0.03292802819330313,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.32061068702290074,
						"acc_stderr,none": 0.04093329229834277,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2871413390010627,
						"acc_stderr,none": 0.04377949041651031,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.4462809917355372,
						"acc_stderr,none": 0.04537935177947879,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.04668408033024931,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615623,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.044328040552915185,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.30097087378640774,
						"acc_stderr,none": 0.045416094465039476,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.030236389942173092,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3652618135376756,
						"acc_stderr,none": 0.01721853002883864,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3265895953757225,
						"acc_stderr,none": 0.02524826477424282,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23910614525139665,
						"acc_stderr,none": 0.014265554192331158,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2581699346405229,
						"acc_stderr,none": 0.025058503316958157,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.32281943997425167,
						"acc_stderr,none": 0.047811192595901685,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.31511254019292606,
						"acc_stderr,none": 0.026385273703464492,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3117283950617284,
						"acc_stderr,none": 0.02577311116963045,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2978723404255319,
						"acc_stderr,none": 0.02728160834446941,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.273142112125163,
						"acc_stderr,none": 0.011380150567830403,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22794117647058823,
						"acc_stderr,none": 0.025483081468029804,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3006535947712418,
						"acc_stderr,none": 0.01855063450295296,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.35454545454545455,
						"acc_stderr,none": 0.04582004841505417,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.27346938775510204,
						"acc_stderr,none": 0.02853556033712844,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.29314267143321415,
						"acc_stderr,none": 0.03440263964438806,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.03333333333333335,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2648271487472249,
						"acc_stderr,none": 0.05376304865956802,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621503,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.0368078369072758,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.32748538011695905,
						"acc_stderr,none": 0.035993357714560276,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3784004075394804,
						"acc_stderr,none": 0.00489562485968903,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.37510170870626525,
						"acc_stderr,none": 0.004882928238617845,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6666666666666666,
						"acc_stderr,none": 0.023366654574426104,
						"alias": "mrpc",
						"f1,none": 0.793939393939394,
						"f1_stderr,none": 0.017278876526381458
					},
					"multimedqa": {
						"acc,none": 0.3227821149751597,
						"acc_norm,none": 0.28858990000342066,
						"acc_norm_stderr,none": 9.102416685018394e-05,
						"acc_stderr,none": 0.11416116085430851,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7075808894217149,
						"mrr_stderr,none": 0.010245950035451257,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42437923250564336,
						"r@2_stderr,none": 0.01661397885056347
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6563205435098428,
						"mrr_stderr,none": 0.010445798801530239,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.45598194130925507,
						"r@2_stderr,none": 0.016742058088832147
					},
					"openbookqa": {
						"acc,none": 0.322,
						"acc_norm,none": 0.426,
						"acc_norm_stderr,none": 0.022136577335085637,
						"acc_stderr,none": 0.020916668330019886,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3695,
						"acc_stderr,none": 0.010795515113846481,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.316,
						"acc_stderr,none": 0.010398368286972359,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3485,
						"acc_stderr,none": 0.010657423015563744,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.01118233080628221,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5435,
						"acc_stderr,none": 0.011140733053371404,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.482,
						"acc_stderr,none": 0.011175886999478619,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4935,
						"acc_stderr,none": 0.011182191006142296,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4352857142857143,
						"acc_stderr,none": 0.0615743455578487,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7932535364526659,
						"acc_norm,none": 0.8068552774755169,
						"acc_norm_stderr,none": 0.009210530962579793,
						"acc_stderr,none": 0.009448665514183273,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2571520068317677,
						"acc_norm,none": 0.3018253629376601,
						"acc_norm_stderr,none": 0.0033537682658879306,
						"acc_stderr,none": 0.003193140936025412,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.766,
						"acc_stderr,none": 0.01895274156489368,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7318073377630966,
						"acc_norm,none": 0.6103462487265284,
						"acc_norm_stderr,none": 0.004492865877393056,
						"acc_stderr,none": 0.1426577222241238,
						"alias": "pythia",
						"bits_per_byte,none": 0.6201211884072789,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5370042863816313,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.877221147872283,
						"perplexity_stderr,none": 0.08122881240478526,
						"word_perplexity,none": 9.959459472125278,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3971631205673759,
						"acc_norm,none": 0.48226950354609927,
						"acc_norm_stderr,none": 0.053804311014196526,
						"acc_stderr,none": 0.041826155148564415,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.04531634835874828,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.375,
						"acc_norm,none": 0.50625,
						"acc_norm_stderr,none": 0.03964948130713095,
						"acc_stderr,none": 0.03839344480212195,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3873239436619718,
						"acc_norm,none": 0.4295774647887324,
						"acc_norm_stderr,none": 0.02942563643537582,
						"acc_stderr,none": 0.028957389575950964,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.514003294892916,
						"acc_stderr,none": 0.006762756741887002,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5289883749690824,
						"acc_stderr,none": 0.0024825178360128874,
						"alias": "qqp",
						"f1,none": 0.37381210746111604,
						"f1_stderr,none": 0.0035433628775402024
					},
					"race": {
						"acc,none": 0.384688995215311,
						"acc_stderr,none": 0.015057468843874156,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.02909101849221745,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.939,
						"acc_norm,none": 0.894,
						"acc_norm_stderr,none": 0.00973955126578514,
						"acc_stderr,none": 0.007572076091557431,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.628158844765343,
						"acc_stderr,none": 0.029091018492217444,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7327981651376146,
						"acc_stderr,none": 0.014993493204432527,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5616315105468359,
						"acc_norm,none": 0.7568229531140658,
						"acc_norm_stderr,none": 0.003033116975660088,
						"acc_stderr,none": 0.003508133562895561,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5750557385777512,
						"acc_stderr,none": 0.03710239568682007,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5221354166666666,
						"acc_stderr,none": 0.0049993490844366896,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6675787980135807,
						"acc_stderr,none": 0.00474269361846193,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5373529411764706,
						"acc_stderr,none": 0.004937145452057436,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2708315899837244,
						"acc_stderr,none": 0.0011717067589892365,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -10.064418516236938,
						"bleu_diff_stderr,none": 0.848497644094101,
						"bleu_max,none": 24.678819878959306,
						"bleu_max_stderr,none": 0.7719089135650388,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.01546102762725359,
						"rouge1_diff,none": -12.27036240470745,
						"rouge1_diff_stderr,none": 0.9300030207544127,
						"rouge1_max,none": 48.689929876362534,
						"rouge1_max_stderr,none": 0.8981865263191734,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728327,
						"rouge2_diff,none": -14.882571896093566,
						"rouge2_diff_stderr,none": 1.098599472827099,
						"rouge2_max,none": 32.38876809163368,
						"rouge2_max_stderr,none": 1.0196427561268149,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237012,
						"rougeL_diff,none": -12.754023969579247,
						"rougeL_diff_stderr,none": 0.938647677805448,
						"rougeL_max,none": 46.01510435568951,
						"rougeL_max_stderr,none": 0.9047163127400675
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.016095884155386844,
						"bleu_diff,none": -10.064418516236938,
						"bleu_diff_stderr,none": 0.848497644094101,
						"bleu_max,none": 24.678819878959306,
						"bleu_max_stderr,none": 0.7719089135650388,
						"rouge1_acc,none": 0.26560587515299877,
						"rouge1_acc_stderr,none": 0.01546102762725359,
						"rouge1_diff,none": -12.27036240470745,
						"rouge1_diff_stderr,none": 0.9300030207544127,
						"rouge1_max,none": 48.689929876362534,
						"rouge1_max_stderr,none": 0.8981865263191734,
						"rouge2_acc,none": 0.20685434516523868,
						"rouge2_acc_stderr,none": 0.014179591496728327,
						"rouge2_diff,none": -14.882571896093566,
						"rouge2_diff_stderr,none": 1.098599472827099,
						"rouge2_max,none": 32.38876809163368,
						"rouge2_max_stderr,none": 1.0196427561268149,
						"rougeL_acc,none": 0.2631578947368421,
						"rougeL_acc_stderr,none": 0.015415241740237012,
						"rougeL_diff,none": -12.754023969579247,
						"rougeL_diff_stderr,none": 0.938647677805448,
						"rougeL_max,none": 46.01510435568951,
						"rougeL_max_stderr,none": 0.9047163127400675
					},
					"truthfulqa_mc1": {
						"acc,none": 0.20807833537331702,
						"acc_stderr,none": 0.014210503473576634,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3335848445941318,
						"acc_stderr,none": 0.013096280756303268,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.05511811023622047,
						"exact_match_stderr,none": 0.0050638514892882985
					},
					"wic": {
						"acc,none": 0.4890282131661442,
						"acc_stderr,none": 0.01980595108597941,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6201211884072789,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5370042863816313,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.959459472125278,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.681136543014996,
						"acc_stderr,none": 0.013097928420088771,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8608058608058609,
						"acc_stderr,none": 0.02098836607085098,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.04233919229869232,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.02218721580302901,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.594,
						"acc_stderr,none": 0.021983962090086337,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.488,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843424,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289666,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.02166071034720448,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.39836680053547524,
						"acc_stderr,none": 0.05310877280620812,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757708,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3823293172690763,
						"acc_stderr,none": 0.009740580649033707,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4706827309236948,
						"acc_stderr,none": 0.010004830045543983,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3542168674698795,
						"acc_stderr,none": 0.009586620142951844,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.551004016064257,
						"acc_stderr,none": 0.009969793477240826,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.43775100401606426,
						"acc_stderr,none": 0.00994409973429016,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4791164658634538,
						"acc_stderr,none": 0.010013327358568525,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.37028112449799194,
						"acc_stderr,none": 0.00967891540984029,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.44497991967871486,
						"acc_stderr,none": 0.009961210239024642,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667058,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3670682730923695,
						"acc_stderr,none": 0.009661385450096037,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.38353413654618473,
						"acc_stderr,none": 0.00974639661344378,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721305,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3546184738955823,
						"acc_stderr,none": 0.00958907012786187,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5627820227423138,
						"acc_stderr,none": 0.07635437818393377,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.485771012574454,
						"acc_stderr,none": 0.012861913999596122,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7802779616148247,
						"acc_stderr,none": 0.01065547970935364,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6565188616810059,
						"acc_stderr,none": 0.012220432513619244,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5089344804765056,
						"acc_stderr,none": 0.0128650709173208,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5201853077432164,
						"acc_stderr,none": 0.012856635706498292,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5493050959629384,
						"acc_stderr,none": 0.012804412720126673,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335892,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5744540039708802,
						"acc_stderr,none": 0.012723670419166324,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.500330906684315,
						"acc_stderr,none": 0.01286712249849342,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5307743216412971,
						"acc_stderr,none": 0.012842730340585787,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5956320317670417,
						"acc_stderr,none": 0.012629580396570935,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7691616093504158,
						"acc_stderr,none": 0.05177053504539272,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8683870967741936,
						"acc_stderr,none": 0.00701274187412196,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6746987951807228,
						"acc_stderr,none": 0.051735765211123864,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6131386861313869,
						"acc_stderr,none": 0.01573527205814044,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6768060836501901,
						"acc_stderr,none": 0.02889435936291791,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6857142857142857,
						"acc_stderr,none": 0.026198057744026414,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7242063492063492,
						"acc_stderr,none": 0.019926879903661536,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "mosaicml/mpt-7b"
	},
	"mosaicml/mpt-7b-chat": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6355693348365277,
						"acc_norm,none": 0.6068207440811725,
						"acc_norm_stderr,none": 0.04218339056759716,
						"acc_stderr,none": 0.05358744129518246,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3465625,
						"acc_stderr,none": 0.014638439135408136,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.14145,
						"acc_stderr,none": 0.1687888384789641,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.7966268656716418,
						"acc_stderr,none": 0.14679643966921255,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29494799405646366,
						"acc_norm,none": 0.29494799405646366,
						"acc_norm_stderr,none": 0.12276475136718207,
						"acc_stderr,none": 0.12276475136718207,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.28587463305128635,
						"acc_norm,none": 0.28587463305128635,
						"acc_norm_stderr,none": 0.054549562327494916,
						"acc_stderr,none": 0.054549562327494916,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.956441935002982,
						"likelihood_diff_stderr,none": 0.6945510371519042,
						"pct_stereotype,none": 0.5787119856887298,
						"pct_stereotype_stderr,none": 0.07576857457996534
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170075
					},
					"glue": {
						"acc,none": 0.4981341061869073,
						"acc_stderr,none": 0.06229194596865805,
						"alias": "glue",
						"f1,none": 0.49302658302405783,
						"f1_stderr,none": 0.000853164778002517,
						"mcc,none": 0.09709025615823998,
						"mcc_stderr,none": 0.0009560515484120038
					},
					"kmmlu": {
						"acc,none": 0.18004620271440946,
						"acc_norm,none": 0.18004620271440946,
						"acc_norm_stderr,none": 0.03835010467620738,
						"acc_stderr,none": 0.03835010467620738,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.49747862310896734,
						"acc_norm,none": 0.428,
						"acc_norm_stderr,none": 0.0004906132264529048,
						"acc_stderr,none": 0.03897165775489114,
						"alias": "kobest",
						"f1,none": 0.4053578860148066,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6124587618862798,
						"acc_stderr,none": 0.0187324309858424,
						"alias": "lambada",
						"perplexity,none": 4.962766493318823,
						"perplexity_stderr,none": 0.4476374693887904
					},
					"lambada_cloze": {
						"acc,none": 0.16922181253638657,
						"acc_stderr,none": 0.03924532961242276,
						"alias": "lambada_cloze",
						"perplexity,none": 151.06512009351397,
						"perplexity_stderr,none": 53.82900314323085
					},
					"lambada_multilingual": {
						"acc,none": 0.3987968173879294,
						"acc_stderr,none": 0.07219791091903441,
						"alias": "lambada_multilingual",
						"perplexity,none": 125.38377800406174,
						"perplexity_stderr,none": 52.40235622230799
					},
					"mmlu": {
						"acc,none": 0.387978920381712,
						"acc_stderr,none": 0.08113348217717196,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.34962805526036134,
						"acc_stderr,none": 0.07002839597187732,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.4486643064048922,
						"acc_stderr,none": 0.06485219735562017,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4413389665258369,
						"acc_stderr,none": 0.07405794463654024,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.08342086024648186,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.34946770759403833,
						"acc_norm,none": 0.30366458087912734,
						"acc_norm_stderr,none": 9.344719787491192e-05,
						"acc_stderr,none": 0.09857132851700397,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4455,
						"acc_stderr,none": 0.042824168951484,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7174533062085588,
						"acc_norm,none": 0.6109967475656303,
						"acc_norm_stderr,none": 0.003950076888378656,
						"acc_stderr,none": 0.1400658818845559,
						"alias": "pythia",
						"bits_per_byte,none": 0.704247037048816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6292940912260545,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.129743234473909,
						"perplexity_stderr,none": 0.1227205231848723,
						"word_perplexity,none": 13.603691301331226,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4078014184397163,
						"acc_norm,none": 0.4716312056737589,
						"acc_norm_stderr,none": 0.04608954817439849,
						"acc_stderr,none": 0.03787818478503423,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.7578449968387075,
						"acc_stderr,none": 0.08602125082153045,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3569589444747222,
						"acc_stderr,none": 0.04452645811644175,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3623011015911873,
						"bleu_acc_stderr,none": 0.00028313604580514556,
						"bleu_diff,none": -2.685211026642102,
						"bleu_diff_stderr,none": 0.7273648142299285,
						"bleu_max,none": 25.017825146929578,
						"bleu_max_stderr,none": 0.6088844370863896,
						"rouge1_acc,none": 0.35495716034271724,
						"rouge1_acc_stderr,none": 0.00028059139051979436,
						"rouge1_diff,none": -3.536793507808863,
						"rouge1_diff_stderr,none": 1.1025403294766978,
						"rouge1_max,none": 49.04286032061536,
						"rouge1_max_stderr,none": 0.7988980441653172,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.0002560848546259371,
						"rouge2_diff,none": -4.3183258454722315,
						"rouge2_diff_stderr,none": 1.3814913542224783,
						"rouge2_max,none": 33.91839802600762,
						"rouge2_max_stderr,none": 1.040522765352985,
						"rougeL_acc,none": 0.35128518971848227,
						"rougeL_acc_stderr,none": 0.000279269491670262,
						"rougeL_diff,none": -3.527270175981697,
						"rougeL_diff_stderr,none": 1.1189000914073388,
						"rougeL_max,none": 46.450018931777564,
						"rougeL_max_stderr,none": 0.8070054248391189
					},
					"xcopa": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.04705888524334253,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.39400267737617134,
						"acc_stderr,none": 0.04975289821772939,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5640454846278804,
						"acc_stderr,none": 0.06251268358839181,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7446617217352214,
						"acc_stderr,none": 0.07732550176464906,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6355693348365277,
						"acc_norm,none": 0.6068207440811725,
						"acc_norm_stderr,none": 0.04218339056759716,
						"acc_stderr,none": 0.05358744129518246,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3465625,
						"acc_stderr,none": 0.014638439135408136,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.343,
						"acc_stderr,none": 0.015019206922356951,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.347,
						"acc_stderr,none": 0.015060472031706617,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3491666666666667,
						"acc_stderr,none": 0.013767075395077245,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40955631399317405,
						"acc_norm,none": 0.431740614334471,
						"acc_norm_stderr,none": 0.014474591427196204,
						"acc_stderr,none": 0.014370358632472451,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7470538720538721,
						"acc_norm,none": 0.6931818181818182,
						"acc_norm_stderr,none": 0.009463075835198944,
						"acc_stderr,none": 0.008919862739165613,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.14145,
						"acc_stderr,none": 0.1687888384789641,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0475,
						"acc_stderr,none": 0.00475743540111671,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.6065,
						"acc_stderr,none": 0.010926507643554023,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1105,
						"acc_stderr,none": 0.007012093819243017,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.421,
						"acc_stderr,none": 0.011042665902539788,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.118,
						"acc_stderr,none": 0.0072155410064671695,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.096,
						"acc_stderr,none": 0.006588907864997597,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.0019924821184884632,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0055,
						"acc_stderr,none": 0.0016541593398342208,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521472,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.00911062906724512,
						"acc_stderr,none": 0.0019794545300791827,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.7966268656716418,
						"acc_stderr,none": 0.14679643966921255,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243766,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.0034449771940998158,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.78,
						"acc_stderr,none": 0.013106173040661775,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.722,
						"acc_stderr,none": 0.01417451646148524,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.014046255632633913,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.01564508768811381,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.01347358666196722,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.827,
						"acc_stderr,none": 0.011967214137559929,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611474,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.0056518088204523705,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832349,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315151,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333433,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340992,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897894,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.593,
						"acc_stderr,none": 0.015543249100255544,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.01312750285969625,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.01165726777130442,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866437,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.737,
						"acc_stderr,none": 0.013929286594259726,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298393,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665537,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.694,
						"acc_stderr,none": 0.014580006055436969,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.01516792886540756,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370148,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074792,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973425,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855768,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904614,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.015788865959539003,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.00936368937324812,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.576,
						"acc_stderr,none": 0.015635487471405193,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.015716169953204105,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.674,
						"acc_stderr,none": 0.01483050720454104,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.836,
						"acc_stderr,none": 0.011715000693181321,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.011234866364235258,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234194,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000117,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.839,
						"acc_stderr,none": 0.011628164696727186,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795027,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.571,
						"acc_stderr,none": 0.01565899754787024,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397243,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333304,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.0042063872496114875,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517906,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.469,
						"acc_stderr,none": 0.015788865959539006,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.01394627184944047,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.723,
						"acc_stderr,none": 0.014158794845306263,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.64,
						"acc_stderr,none": 0.015186527932040122,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175313,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.823,
						"acc_stderr,none": 0.012075463420375061,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.01159890229868901,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621233,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333456,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571412,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.265,
						"acc_stderr,none": 0.013963164754809956,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361427,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7657492354740061,
						"acc_stderr,none": 0.007407579721747382,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.06737697508644648,
						"alias": "cb",
						"f1,none": 0.28777777777777774,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29494799405646366,
						"acc_norm,none": 0.29494799405646366,
						"acc_norm_stderr,none": 0.12276475136718207,
						"acc_stderr,none": 0.12276475136718207,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122594,
						"acc_stderr,none": 0.08503766788122594,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.425531914893617,
						"acc_norm,none": 0.425531914893617,
						"acc_norm_stderr,none": 0.07289875413448858,
						"acc_stderr,none": 0.07289875413448858,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.057814497055572435,
						"acc_stderr,none": 0.057814497055572435,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063836,
						"acc_stderr,none": 0.07213122508063836,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.09176629354822471,
						"acc_stderr,none": 0.09176629354822471,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.10094660663590604,
						"acc_stderr,none": 0.10094660663590604,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.11470786693528086,
						"acc_stderr,none": 0.11470786693528086,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.15075567228888181,
						"acc_stderr,none": 0.15075567228888181,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.6086956521739131,
						"acc_norm,none": 0.6086956521739131,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.41379310344827586,
						"acc_norm,none": 0.41379310344827586,
						"acc_norm_stderr,none": 0.0930760769837004,
						"acc_stderr,none": 0.0930760769837004,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.3469387755102041,
						"acc_norm,none": 0.3469387755102041,
						"acc_norm_stderr,none": 0.06870411522695291,
						"acc_stderr,none": 0.06870411522695291,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3409090909090909,
						"acc_norm,none": 0.3409090909090909,
						"acc_norm_stderr,none": 0.07228658768525041,
						"acc_stderr,none": 0.07228658768525041,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.1956521739130435,
						"acc_norm,none": 0.1956521739130435,
						"acc_norm_stderr,none": 0.05913682829884973,
						"acc_stderr,none": 0.05913682829884973,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.28587463305128635,
						"acc_norm,none": 0.28587463305128635,
						"acc_norm_stderr,none": 0.054549562327494916,
						"acc_stderr,none": 0.054549562327494916,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676975,
						"acc_stderr,none": 0.03410167836676975,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.20945945945945946,
						"acc_norm,none": 0.20945945945945946,
						"acc_norm_stderr,none": 0.033562429827632696,
						"acc_stderr,none": 0.033562429827632696,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.03589325106058396,
						"acc_stderr,none": 0.03589325106058396,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0347769116216366,
						"acc_stderr,none": 0.0347769116216366,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.3397129186602871,
						"acc_norm,none": 0.3397129186602871,
						"acc_norm_stderr,none": 0.03283906353745932,
						"acc_stderr,none": 0.03283906353745932,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.03612181848191273,
						"acc_stderr,none": 0.03612181848191273,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.19852941176470587,
						"acc_norm,none": 0.19852941176470587,
						"acc_norm_stderr,none": 0.034331228029202236,
						"acc_stderr,none": 0.034331228029202236,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.34579439252336447,
						"acc_norm,none": 0.34579439252336447,
						"acc_norm_stderr,none": 0.0461969359662258,
						"acc_stderr,none": 0.0461969359662258,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.26006191950464397,
						"acc_norm,none": 0.26006191950464397,
						"acc_norm_stderr,none": 0.024446018457216463,
						"acc_stderr,none": 0.024446018457216463,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.03096451792692339,
						"acc_stderr,none": 0.03096451792692339,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.26256983240223464,
						"acc_norm,none": 0.26256983240223464,
						"acc_norm_stderr,none": 0.03298168673967123,
						"acc_stderr,none": 0.03298168673967123,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.23628691983122363,
						"acc_norm,none": 0.23628691983122363,
						"acc_norm_stderr,none": 0.027652153144159263,
						"acc_stderr,none": 0.027652153144159263,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.040225592469367126,
						"acc_stderr,none": 0.040225592469367126,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.27102803738317754,
						"acc_norm,none": 0.27102803738317754,
						"acc_norm_stderr,none": 0.04317273776566669,
						"acc_stderr,none": 0.04317273776566669,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.04621678759968267,
						"acc_stderr,none": 0.04621678759968267,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.3148148148148148,
						"acc_norm,none": 0.3148148148148148,
						"acc_norm_stderr,none": 0.04489931073591312,
						"acc_stderr,none": 0.04489931073591312,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.29523809523809524,
						"acc_norm,none": 0.29523809523809524,
						"acc_norm_stderr,none": 0.044729159560441434,
						"acc_stderr,none": 0.044729159560441434,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439376,
						"acc_stderr,none": 0.04396093377439376,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.026598537627601476,
						"acc_stderr,none": 0.026598537627601476,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.03132179803083289,
						"acc_stderr,none": 0.03132179803083289,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.03508771929824564,
						"acc_stderr,none": 0.03508771929824564,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606755,
						"acc_stderr,none": 0.03558926157606755,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735708,
						"acc_stderr,none": 0.03881956126735708,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.03583711288976435,
						"acc_stderr,none": 0.03583711288976435,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.3496932515337423,
						"acc_norm,none": 0.3496932515337423,
						"acc_norm_stderr,none": 0.037466683254700206,
						"acc_stderr,none": 0.037466683254700206,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.29651162790697677,
						"acc_norm,none": 0.29651162790697677,
						"acc_norm_stderr,none": 0.03492619473255953,
						"acc_stderr,none": 0.03492619473255953,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.1865079365079365,
						"acc_norm,none": 0.1865079365079365,
						"acc_norm_stderr,none": 0.024586032873566883,
						"acc_stderr,none": 0.024586032873566883,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713547,
						"acc_stderr,none": 0.03191178226713547,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3697478991596639,
						"acc_norm,none": 0.3697478991596639,
						"acc_norm_stderr,none": 0.03135709599613591,
						"acc_stderr,none": 0.03135709599613591,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21304347826086956,
						"acc_norm,none": 0.21304347826086956,
						"acc_norm_stderr,none": 0.027057754389936208,
						"acc_stderr,none": 0.027057754389936208,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501117,
						"acc_stderr,none": 0.03944624162501117,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.40559440559440557,
						"acc_norm,none": 0.40559440559440557,
						"acc_norm_stderr,none": 0.04120436731133787,
						"acc_stderr,none": 0.04120436731133787,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2953020134228188,
						"acc_norm,none": 0.2953020134228188,
						"acc_norm_stderr,none": 0.037497633645270485,
						"acc_stderr,none": 0.037497633645270485,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552487,
						"acc_stderr,none": 0.03703667194552487,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2457627118644068,
						"acc_norm,none": 0.2457627118644068,
						"acc_norm_stderr,none": 0.03980329854920432,
						"acc_stderr,none": 0.03980329854920432,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.29878048780487804,
						"acc_norm,none": 0.29878048780487804,
						"acc_norm_stderr,none": 0.035851663369096606,
						"acc_stderr,none": 0.035851663369096606,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.040693063197213775,
						"acc_stderr,none": 0.040693063197213775,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.27972027972027974,
						"acc_norm,none": 0.27972027972027974,
						"acc_norm_stderr,none": 0.03766763889539855,
						"acc_stderr,none": 0.03766763889539855,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.31746031746031744,
						"acc_norm,none": 0.31746031746031744,
						"acc_norm_stderr,none": 0.04163453031302859,
						"acc_stderr,none": 0.04163453031302859,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.032739439990023544,
						"acc_stderr,none": 0.032739439990023544,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.3372093023255814,
						"acc_norm,none": 0.3372093023255814,
						"acc_norm_stderr,none": 0.03615263198871634,
						"acc_stderr,none": 0.03615263198871634,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.2846715328467153,
						"acc_norm,none": 0.2846715328467153,
						"acc_norm_stderr,none": 0.02228603692971729,
						"acc_stderr,none": 0.02228603692971729,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.411214953271028,
						"acc_norm,none": 0.411214953271028,
						"acc_norm_stderr,none": 0.03371498987315741,
						"acc_stderr,none": 0.03371498987315741,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.22764227642276422,
						"acc_norm,none": 0.22764227642276422,
						"acc_norm_stderr,none": 0.037962586241752624,
						"acc_stderr,none": 0.037962586241752624,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069251,
						"acc_stderr,none": 0.04075944659069251,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.03092739584327575,
						"acc_stderr,none": 0.03092739584327575,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732729,
						"acc_stderr,none": 0.03460236918732729,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.31216931216931215,
						"acc_norm,none": 0.31216931216931215,
						"acc_norm_stderr,none": 0.03379535035917228,
						"acc_stderr,none": 0.03379535035917228,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.04314091325318788,
						"acc_stderr,none": 0.04314091325318788,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.31724137931034485,
						"acc_norm,none": 0.31724137931034485,
						"acc_norm_stderr,none": 0.038783523721386215,
						"acc_stderr,none": 0.038783523721386215,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.3142857142857143,
						"acc_norm,none": 0.3142857142857143,
						"acc_norm_stderr,none": 0.045521571818039494,
						"acc_stderr,none": 0.045521571818039494,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.03444952656229018,
						"acc_stderr,none": 0.03444952656229018,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.32701421800947866,
						"acc_norm,none": 0.32701421800947866,
						"acc_norm_stderr,none": 0.032372527979102124,
						"acc_stderr,none": 0.032372527979102124,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2393617021276596,
						"acc_norm,none": 0.2393617021276596,
						"acc_norm_stderr,none": 0.022034377848093537,
						"acc_stderr,none": 0.022034377848093537,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.27155172413793105,
						"acc_norm,none": 0.27155172413793105,
						"acc_norm_stderr,none": 0.02926305423393191,
						"acc_stderr,none": 0.02926305423393191,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2988505747126437,
						"acc_norm,none": 0.2988505747126437,
						"acc_norm_stderr,none": 0.03480240745663784,
						"acc_stderr,none": 0.03480240745663784,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.362962962962963,
						"acc_norm,none": 0.362962962962963,
						"acc_norm_stderr,none": 0.041539484047424,
						"acc_stderr,none": 0.041539484047424,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.035886248000917075,
						"acc_stderr,none": 0.035886248000917075,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.03369553691877716,
						"acc_stderr,none": 0.03369553691877716,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2732919254658385,
						"acc_norm,none": 0.2732919254658385,
						"acc_norm_stderr,none": 0.0352316839773709,
						"acc_stderr,none": 0.0352316839773709,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.09709025615823998,
						"mcc_stderr,none": 0.030920083253639596
					},
					"copa": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.03379976689896309,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.956441935002982,
						"likelihood_diff_stderr,none": 0.6945510371519042,
						"pct_stereotype,none": 0.5787119856887298,
						"pct_stereotype_stderr,none": 0.07576857457996534
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 4.70989862850328,
						"likelihood_diff_stderr,none": 0.1074561685876851,
						"pct_stereotype,none": 0.6249254621347644,
						"pct_stereotype_stderr,none": 0.011825946073917681
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.964285714285714,
						"likelihood_diff_stderr,none": 0.4689955531616481,
						"pct_stereotype,none": 0.6263736263736264,
						"pct_stereotype_stderr,none": 0.0509934316638677
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.8522727272727275,
						"likelihood_diff_stderr,none": 1.6676073447302717,
						"pct_stereotype,none": 0.5454545454545454,
						"pct_stereotype_stderr,none": 0.1574591643244434
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 7.915384615384616,
						"likelihood_diff_stderr,none": 0.7454242815118486,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.057692307692307675
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 3.55234375,
						"likelihood_diff_stderr,none": 0.221975039639235,
						"pct_stereotype,none": 0.578125,
						"pct_stereotype_stderr,none": 0.027650782660529012
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 4.516782407407407,
						"likelihood_diff_stderr,none": 0.2825715473022958,
						"pct_stereotype,none": 0.6064814814814815,
						"pct_stereotype_stderr,none": 0.03331747876370312
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.675347222222222,
						"likelihood_diff_stderr,none": 0.4313341198081708,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513981
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 4.298228346456693,
						"likelihood_diff_stderr,none": 0.17416751710085326,
						"pct_stereotype,none": 0.562992125984252,
						"pct_stereotype_stderr,none": 0.02202884929608508
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.9324324324324325,
						"likelihood_diff_stderr,none": 0.4108878038168192,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.04188770861432397
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 6.629032258064516,
						"likelihood_diff_stderr,none": 0.5645123722575858,
						"pct_stereotype,none": 0.8387096774193549,
						"pct_stereotype_stderr,none": 0.03834564688497146
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 5.864473684210527,
						"likelihood_diff_stderr,none": 0.33199177390172385,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 5.200767740011926,
						"likelihood_diff_stderr,none": 0.11261196817756662,
						"pct_stereotype,none": 0.5324985092426953,
						"pct_stereotype_stderr,none": 0.0121874736863312
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.858333333333333,
						"likelihood_diff_stderr,none": 0.4428135064540458,
						"pct_stereotype,none": 0.4666666666666667,
						"pct_stereotype_stderr,none": 0.05288198530254015
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 5.576923076923077,
						"likelihood_diff_stderr,none": 1.3108229870402248,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 7.795454545454546,
						"likelihood_diff_stderr,none": 0.7715231259555978,
						"pct_stereotype,none": 0.5757575757575758,
						"pct_stereotype_stderr,none": 0.06130137276858363
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 4.580218068535825,
						"likelihood_diff_stderr,none": 0.23761929051906433,
						"pct_stereotype,none": 0.4984423676012461,
						"pct_stereotype_stderr,none": 0.027950714088670347
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 5.25494071146245,
						"likelihood_diff_stderr,none": 0.2759503914609315,
						"pct_stereotype,none": 0.31620553359683795,
						"pct_stereotype_stderr,none": 0.029291880485542005
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.65625,
						"likelihood_diff_stderr,none": 0.49092999787443126,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.05913268547421811
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 4.641304347826087,
						"likelihood_diff_stderr,none": 0.19449496218755016,
						"pct_stereotype,none": 0.5630434782608695,
						"pct_stereotype_stderr,none": 0.023151745316873383
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 5.015217391304348,
						"likelihood_diff_stderr,none": 0.4217382457703809,
						"pct_stereotype,none": 0.5652173913043478,
						"pct_stereotype_stderr,none": 0.04642922286356427
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 7.208791208791209,
						"likelihood_diff_stderr,none": 0.5320219646049363,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.041084468550358806
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 5.970663265306122,
						"likelihood_diff_stderr,none": 0.37774440779236806,
						"pct_stereotype,none": 0.6530612244897959,
						"pct_stereotype_stderr,none": 0.03408678678944596
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170075
					},
					"glue": {
						"acc,none": 0.4981341061869073,
						"acc_stderr,none": 0.06229194596865805,
						"alias": "glue",
						"f1,none": 0.49302658302405783,
						"f1_stderr,none": 0.000853164778002517,
						"mcc,none": 0.09709025615823998,
						"mcc_stderr,none": 0.0009560515484120038
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.08794541319181198,
						"exact_match_stderr,get-answer": 0.007801162197487723
					},
					"hellaswag": {
						"acc,none": 0.575682135032862,
						"acc_norm,none": 0.7445727942640908,
						"acc_norm_stderr,none": 0.004352098082984433,
						"acc_stderr,none": 0.004932289405608949,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.18004620271440946,
						"acc_norm,none": 0.18004620271440946,
						"acc_norm_stderr,none": 0.03835010467620738,
						"acc_stderr,none": 0.03835010467620738,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816506,
						"acc_stderr,none": 0.04229525846816506,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.01067487484483796,
						"acc_stderr,none": 0.01067487484483796,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.158,
						"acc_norm,none": 0.158,
						"acc_norm_stderr,none": 0.011539894677559554,
						"acc_stderr,none": 0.011539894677559554,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.01347358666196722,
						"acc_stderr,none": 0.01347358666196722,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.242,
						"acc_norm,none": 0.242,
						"acc_norm_stderr,none": 0.013550631705555979,
						"acc_stderr,none": 0.013550631705555979,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.20333333333333334,
						"acc_norm,none": 0.20333333333333334,
						"acc_norm_stderr,none": 0.016444822948814254,
						"acc_stderr,none": 0.016444822948814254,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.122,
						"acc_norm,none": 0.122,
						"acc_norm_stderr,none": 0.010354864712936698,
						"acc_stderr,none": 0.010354864712936698,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096925,
						"acc_stderr,none": 0.012841374572096925,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.129,
						"acc_norm,none": 0.129,
						"acc_norm_stderr,none": 0.010605256784796596,
						"acc_stderr,none": 0.010605256784796596,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.129,
						"acc_norm,none": 0.129,
						"acc_norm_stderr,none": 0.010605256784796568,
						"acc_stderr,none": 0.010605256784796568,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.25384615384615383,
						"acc_norm,none": 0.25384615384615383,
						"acc_norm_stderr,none": 0.03831815850874501,
						"acc_stderr,none": 0.03831815850874501,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.009491579957525057,
						"acc_stderr,none": 0.009491579957525057,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.178,
						"acc_norm,none": 0.178,
						"acc_norm_stderr,none": 0.012102167676183604,
						"acc_stderr,none": 0.012102167676183604,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.013106173040661771,
						"acc_stderr,none": 0.013106173040661771,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.143,
						"acc_norm,none": 0.143,
						"acc_norm_stderr,none": 0.01107581480856704,
						"acc_stderr,none": 0.01107581480856704,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.168,
						"acc_norm,none": 0.168,
						"acc_norm_stderr,none": 0.011828605831454262,
						"acc_stderr,none": 0.011828605831454262,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.169,
						"acc_norm,none": 0.169,
						"acc_norm_stderr,none": 0.011856625977890119,
						"acc_stderr,none": 0.011856625977890119,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.148,
						"acc_norm,none": 0.148,
						"acc_norm_stderr,none": 0.01123486636423524,
						"acc_stderr,none": 0.01123486636423524,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541663,
						"acc_stderr,none": 0.011884495834541663,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.011107987548939149,
						"acc_stderr,none": 0.011107987548939149,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.206,
						"acc_norm,none": 0.206,
						"acc_norm_stderr,none": 0.012795613612786534,
						"acc_stderr,none": 0.012795613612786534,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.143,
						"acc_norm,none": 0.143,
						"acc_norm_stderr,none": 0.011075814808567038,
						"acc_stderr,none": 0.011075814808567038,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.238,
						"acc_norm,none": 0.238,
						"acc_norm_stderr,none": 0.013473586661967228,
						"acc_stderr,none": 0.013473586661967228,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341674,
						"acc_stderr,none": 0.011328165223341674,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.211,
						"acc_norm,none": 0.211,
						"acc_norm_stderr,none": 0.01290913032104209,
						"acc_stderr,none": 0.01290913032104209,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.20666666666666667,
						"acc_norm,none": 0.20666666666666667,
						"acc_norm_stderr,none": 0.016544348028215757,
						"acc_stderr,none": 0.016544348028215757,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.216,
						"acc_norm,none": 0.216,
						"acc_norm_stderr,none": 0.013019735539307818,
						"acc_stderr,none": 0.013019735539307818,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.178,
						"acc_norm,none": 0.178,
						"acc_norm_stderr,none": 0.012102167676183571,
						"acc_stderr,none": 0.012102167676183571,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.011598902298688997,
						"acc_stderr,none": 0.011598902298688997,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.04461960433384741,
						"acc_stderr,none": 0.04461960433384741,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.023555243542102446,
						"acc_stderr,none": 0.023555243542102446,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.239,
						"acc_norm,none": 0.239,
						"acc_norm_stderr,none": 0.013493000446937587,
						"acc_stderr,none": 0.013493000446937587,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.141,
						"acc_norm,none": 0.141,
						"acc_norm_stderr,none": 0.011010914595992434,
						"acc_stderr,none": 0.011010914595992434,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968133,
						"acc_stderr,none": 0.012583693787968133,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264371,
						"acc_stderr,none": 0.012510816141264371,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.227,
						"acc_norm,none": 0.227,
						"acc_norm_stderr,none": 0.013253174964763925,
						"acc_stderr,none": 0.013253174964763925,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.028617649261360196,
						"acc_stderr,none": 0.028617649261360196,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707366,
						"acc_stderr,none": 0.012559527926707366,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.49747862310896734,
						"acc_norm,none": 0.428,
						"acc_norm_stderr,none": 0.0004906132264529048,
						"acc_stderr,none": 0.03897165775489114,
						"alias": "kobest",
						"f1,none": 0.4053578860148066,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5128205128205128,
						"acc_stderr,none": 0.013344378621956914,
						"alias": " - kobest_boolq",
						"f1,none": 0.36466049015171365,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996012,
						"alias": " - kobest_copa",
						"f1,none": 0.5242001805034262,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.366,
						"acc_norm,none": 0.428,
						"acc_norm_stderr,none": 0.022149790663861923,
						"acc_stderr,none": 0.021564276850201618,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3619884181584249,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5692695214105793,
						"acc_stderr,none": 0.024883655207256227,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5500745600954369,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6124587618862798,
						"acc_stderr,none": 0.0187324309858424,
						"alias": "lambada",
						"perplexity,none": 4.962766493318823,
						"perplexity_stderr,none": 0.4476374693887904
					},
					"lambada_cloze": {
						"acc,none": 0.16922181253638657,
						"acc_stderr,none": 0.03924532961242276,
						"alias": "lambada_cloze",
						"perplexity,none": 151.06512009351397,
						"perplexity_stderr,none": 53.82900314323085
					},
					"lambada_multilingual": {
						"acc,none": 0.3987968173879294,
						"acc_stderr,none": 0.07219791091903441,
						"alias": "lambada_multilingual",
						"perplexity,none": 125.38377800406174,
						"perplexity_stderr,none": 52.40235622230799
					},
					"lambada_openai": {
						"acc,none": 0.6481661168251504,
						"acc_stderr,none": 0.006653100223974357,
						"alias": " - lambada_openai",
						"perplexity,none": 4.129743234473909,
						"perplexity_stderr,none": 0.1227205231848723
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.24704055889772947,
						"acc_stderr,none": 0.006008720389692808,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 44.3353373022915,
						"perplexity_stderr,none": 1.6107592783411016
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2907044440131962,
						"acc_stderr,none": 0.006326325417865827,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 246.11800774380708,
						"perplexity_stderr,none": 19.316329918931068
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6435086357461672,
						"acc_stderr,none": 0.006672886984196206,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.134202545922669,
						"perplexity_stderr,none": 0.12270762284372046
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.30448282553852124,
						"acc_stderr,none": 0.006411319244787222,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 159.1772156006747,
						"perplexity_stderr,none": 11.435426426986673
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4022899281971667,
						"acc_stderr,none": 0.00683167094107339,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 74.82986938606717,
						"perplexity_stderr,none": 5.185660290007928
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.3529982534445954,
						"acc_stderr,none": 0.006658111712346047,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 142.65959474383715,
						"perplexity_stderr,none": 10.916790300284314
					},
					"lambada_standard": {
						"acc,none": 0.5775276537939065,
						"acc_stderr,none": 0.0068817296664499105,
						"alias": " - lambada_standard",
						"perplexity,none": 5.800355514538693,
						"perplexity_stderr,none": 0.18698529342007775
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.09140306617504367,
						"acc_stderr,none": 0.004014931024485408,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 257.7949028847365,
						"perplexity_stderr,none": 9.81642725072017
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2907124681933842,
						"exact_match_stderr,get-answer": 0.011456577557813215
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2672811059907834,
						"acc_norm_stderr,none": 0.0173578586224101,
						"acc_stderr,none": 0.01681567620647953,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2881679389312977,
						"acc_norm_stderr,none": 0.011426770634965255,
						"acc_stderr,none": 0.01106027531025994,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2693467336683417,
						"acc_norm,none": 0.27571189279731995,
						"acc_norm_stderr,none": 0.008180578520830267,
						"acc_stderr,none": 0.008121048652111601,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.48877356492268587,
						"acc_stderr,none": 0.005144598741777583,
						"alias": "mc_taco",
						"f1,none": 0.555811171436459,
						"f1_stderr,none": 0.005740716620645664
					},
					"medmcqa": {
						"acc,none": 0.2995457805402821,
						"acc_norm,none": 0.2995457805402821,
						"acc_norm_stderr,none": 0.007083199383786312,
						"acc_stderr,none": 0.007083199383786312,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3040062843676355,
						"acc_norm,none": 0.3040062843676355,
						"acc_norm_stderr,none": 0.012897346986972818,
						"acc_stderr,none": 0.012897346986972818,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.387978920381712,
						"acc_stderr,none": 0.08113348217717196,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.4444444444444444,
						"acc_stderr,none": 0.04292596718256981,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3815789473684211,
						"acc_stderr,none": 0.03953173377749194,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.04923659639173309,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.4679245283018868,
						"acc_stderr,none": 0.030709486992556545,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.4375,
						"acc_stderr,none": 0.04148415739394154,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001975,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.3699421965317919,
						"acc_stderr,none": 0.0368122963339432,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.05021167315686781,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3276595744680851,
						"acc_stderr,none": 0.030683020843231008,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.4482758620689655,
						"acc_stderr,none": 0.04144311810878152,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.23015873015873015,
						"acc_stderr,none": 0.021679219663693145,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.3253968253968254,
						"acc_stderr,none": 0.041905964388711366,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.45161290322580644,
						"acc_stderr,none": 0.02831050034856839,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2660098522167488,
						"acc_stderr,none": 0.03108982600293752,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.41,
						"acc_stderr,none": 0.049431107042371025,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.4727272727272727,
						"acc_stderr,none": 0.0389853160557942,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.4595959595959596,
						"acc_stderr,none": 0.03550702465131341,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.5284974093264249,
						"acc_stderr,none": 0.03602573571288442,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.4282051282051282,
						"acc_stderr,none": 0.02508830145469483,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2111111111111111,
						"acc_stderr,none": 0.02488211685765509,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.39915966386554624,
						"acc_stderr,none": 0.031811100324139245,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.32450331125827814,
						"acc_stderr,none": 0.03822746937658753,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.5027522935779817,
						"acc_stderr,none": 0.02143699835976532,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.28703703703703703,
						"acc_stderr,none": 0.030851992993257013,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.4166666666666667,
						"acc_stderr,none": 0.0346022832723917,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.5021097046413502,
						"acc_stderr,none": 0.032546938018020076,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4304932735426009,
						"acc_stderr,none": 0.0332319730294294,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.4732824427480916,
						"acc_stderr,none": 0.04379024936553894,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.34962805526036134,
						"acc_stderr,none": 0.07002839597187732,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.39669421487603307,
						"acc_stderr,none": 0.044658697805310094,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.42592592592592593,
						"acc_stderr,none": 0.0478034362693679,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3803680981595092,
						"acc_stderr,none": 0.03814269893261837,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.35714285714285715,
						"acc_stderr,none": 0.04547960999764376,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4854368932038835,
						"acc_stderr,none": 0.04948637324026637,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.5982905982905983,
						"acc_stderr,none": 0.032116937510516204,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.05016135580465919,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.5274584929757343,
						"acc_stderr,none": 0.01785298126663394,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3901734104046243,
						"acc_stderr,none": 0.026261677607806632,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.26033519553072626,
						"acc_stderr,none": 0.014676252009319475,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.4117647058823529,
						"acc_stderr,none": 0.028180596328259287,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.4486643064048922,
						"acc_stderr,none": 0.06485219735562017,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.4180064308681672,
						"acc_stderr,none": 0.02801365189199507,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3734567901234568,
						"acc_stderr,none": 0.026915003011380154,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.34397163120567376,
						"acc_stderr,none": 0.02833801742861132,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.29921773142112124,
						"acc_stderr,none": 0.011695374630696037,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.3235294117647059,
						"acc_stderr,none": 0.02841820861940679,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.35130718954248363,
						"acc_stderr,none": 0.019312676065786565,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4448979591836735,
						"acc_stderr,none": 0.031814251181977865,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.4413389665258369,
						"acc_stderr,none": 0.07405794463654024,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.5970149253731343,
						"acc_stderr,none": 0.034683432951111266,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.08342086024648186,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.050211673156867795,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.4397590361445783,
						"acc_stderr,none": 0.03864139923699122,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.5146198830409356,
						"acc_stderr,none": 0.038331852752130254,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.39449821701477333,
						"acc_stderr,none": 0.004933523584717726,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3927990235964198,
						"acc_stderr,none": 0.004925525619694046,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6911764705882353,
						"acc_stderr,none": 0.022900895184021622,
						"alias": "mrpc",
						"f1,none": 0.8055555555555556,
						"f1_stderr,none": 0.017005113287039464
					},
					"multimedqa": {
						"acc,none": 0.34946770759403833,
						"acc_norm,none": 0.30366458087912734,
						"acc_norm_stderr,none": 9.344719787491192e-05,
						"acc_stderr,none": 0.09857132851700397,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.554042904290429,
						"acc_stderr,none": 0.007139729003586973,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7139766755141739,
						"mrr_stderr,none": 0.010258911553967964,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42325056433408575,
						"r@2_stderr,none": 0.016608129658774624
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6590481583138352,
						"mrr_stderr,none": 0.010426811623874994,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4525959367945824,
						"r@2_stderr,none": 0.016731608666774797
					},
					"openbookqa": {
						"acc,none": 0.324,
						"acc_norm,none": 0.454,
						"acc_norm_stderr,none": 0.022288147591176945,
						"acc_stderr,none": 0.020950557312477452,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4585,
						"acc_stderr,none": 0.01114454913793035,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3405,
						"acc_stderr,none": 0.010598869893602354,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.3935,
						"acc_stderr,none": 0.010926507643554023,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4915,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4945,
						"acc_stderr,none": 0.011182459420867635,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4575,
						"acc_stderr,none": 0.011142663706548617,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4825,
						"acc_stderr,none": 0.011176284251254184,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4455,
						"acc_stderr,none": 0.042824168951484,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7861806311207835,
						"acc_norm,none": 0.780195865070729,
						"acc_norm_stderr,none": 0.009661958616651764,
						"acc_stderr,none": 0.009565994206915599,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.27449829205807,
						"acc_norm,none": 0.3183710503842869,
						"acc_norm_stderr,none": 0.0034034074877932973,
						"acc_stderr,none": 0.0032603347595525065,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177528,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7174533062085588,
						"acc_norm,none": 0.6109967475656303,
						"acc_norm_stderr,none": 0.003950076888378656,
						"acc_stderr,none": 0.1400658818845559,
						"alias": "pythia",
						"bits_per_byte,none": 0.704247037048816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6292940912260545,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.129743234473909,
						"perplexity_stderr,none": 0.1227205231848723,
						"word_perplexity,none": 13.603691301331226,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4078014184397163,
						"acc_norm,none": 0.4716312056737589,
						"acc_norm_stderr,none": 0.04608954817439849,
						"acc_stderr,none": 0.03787818478503423,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.45,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.04560517440787952,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.4,
						"acc_norm,none": 0.48125,
						"acc_norm_stderr,none": 0.03962468875738331,
						"acc_stderr,none": 0.038851434494290536,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.426056338028169,
						"acc_norm_stderr,none": 0.0293950991596978,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.542559033498078,
						"acc_stderr,none": 0.006740858011253848,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5323027454860252,
						"acc_stderr,none": 0.002481505623287183,
						"alias": "qqp",
						"f1,none": 0.4900897985599871,
						"f1_stderr,none": 0.0031799761343353486
					},
					"race": {
						"acc,none": 0.40382775119617226,
						"acc_stderr,none": 0.015185661294969257,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.029660066290893485,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.931,
						"acc_norm,none": 0.883,
						"acc_norm_stderr,none": 0.010169287802713329,
						"acc_stderr,none": 0.008018934050315153,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5848375451263538,
						"acc_stderr,none": 0.029660066290893485,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.7970183486238532,
						"acc_stderr,none": 0.013628669913308699,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5717284814555633,
						"acc_norm,none": 0.7431770468859342,
						"acc_norm_stderr,none": 0.003088828415135528,
						"acc_stderr,none": 0.0034985270221296106,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.7578449968387075,
						"acc_stderr,none": 0.08602125082153045,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.8764022435897436,
						"acc_stderr,none": 0.0032940220232889348,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8587209891557718,
						"acc_stderr,none": 0.003506665223133934,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5442156862745098,
						"acc_stderr,none": 0.004931583820741781,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3569589444747222,
						"acc_stderr,none": 0.04452645811644175,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3623011015911873,
						"bleu_acc_stderr,none": 0.00028313604580514556,
						"bleu_diff,none": -2.685211026642102,
						"bleu_diff_stderr,none": 0.7273648142299285,
						"bleu_max,none": 25.017825146929578,
						"bleu_max_stderr,none": 0.6088844370863896,
						"rouge1_acc,none": 0.35495716034271724,
						"rouge1_acc_stderr,none": 0.00028059139051979436,
						"rouge1_diff,none": -3.536793507808863,
						"rouge1_diff_stderr,none": 1.1025403294766978,
						"rouge1_max,none": 49.04286032061536,
						"rouge1_max_stderr,none": 0.7988980441653172,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.0002560848546259371,
						"rouge2_diff,none": -4.3183258454722315,
						"rouge2_diff_stderr,none": 1.3814913542224783,
						"rouge2_max,none": 33.91839802600762,
						"rouge2_max_stderr,none": 1.040522765352985,
						"rougeL_acc,none": 0.35128518971848227,
						"rougeL_acc_stderr,none": 0.000279269491670262,
						"rougeL_diff,none": -3.527270175981697,
						"rougeL_diff_stderr,none": 1.1189000914073388,
						"rougeL_max,none": 46.450018931777564,
						"rougeL_max_stderr,none": 0.8070054248391189
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3623011015911873,
						"bleu_acc_stderr,none": 0.016826646897262258,
						"bleu_diff,none": -2.685211026642102,
						"bleu_diff_stderr,none": 0.8528568544778945,
						"bleu_max,none": 25.017825146929578,
						"bleu_max_stderr,none": 0.7803104748024273,
						"rouge1_acc,none": 0.35495716034271724,
						"rouge1_acc_stderr,none": 0.0167508623813759,
						"rouge1_diff,none": -3.536793507808863,
						"rouge1_diff_stderr,none": 1.050019204337091,
						"rouge1_max,none": 49.04286032061536,
						"rouge1_max_stderr,none": 0.8938109666844087,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.016002651487360995,
						"rouge2_diff,none": -4.3183258454722315,
						"rouge2_diff_stderr,none": 1.17536860355485,
						"rouge2_max,none": 33.91839802600762,
						"rouge2_max_stderr,none": 1.0200601773194486,
						"rougeL_acc,none": 0.35128518971848227,
						"rougeL_acc_stderr,none": 0.0167113581635444,
						"rougeL_diff,none": -3.527270175981697,
						"rougeL_diff_stderr,none": 1.057780738814684,
						"rougeL_max,none": 46.450018931777564,
						"rougeL_max_stderr,none": 0.8983348066501258
					},
					"truthfulqa_mc1": {
						"acc,none": 0.26805385556915545,
						"acc_stderr,none": 0.015506204722834559,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.40141148892750556,
						"acc_stderr,none": 0.014766493203916013,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.028051181102362203,
						"exact_match_stderr,none": 0.0036638890384170075
					},
					"wic": {
						"acc,none": 0.5517241379310345,
						"acc_stderr,none": 0.01970443349753693,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.704247037048816,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6292940912260545,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 13.603691301331226,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6732438831886346,
						"acc_stderr,none": 0.013181997302131359,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.49295774647887325,
						"acc_stderr,none": 0.05975550263548289,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6730769230769231,
						"acc_stderr,none": 0.04622070089521466,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8168498168498168,
						"acc_stderr,none": 0.02345256426170499,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.04705888524334253,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.464,
						"acc_stderr,none": 0.02232498173838525,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044285,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.022378596989230774,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.634,
						"acc_stderr,none": 0.021564276850201618,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.39400267737617134,
						"acc_stderr,none": 0.04975289821772939,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.00947420377875771,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.40080321285140563,
						"acc_stderr,none": 0.00982285847304738,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4389558232931727,
						"acc_stderr,none": 0.009947100105978367,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.348995983935743,
						"acc_stderr,none": 0.009554095988300685,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5381526104417671,
						"acc_stderr,none": 0.00999285357974996,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4566265060240964,
						"acc_stderr,none": 0.00998429341084031,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4606425702811245,
						"acc_stderr,none": 0.009990976095711899,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.35180722891566263,
						"acc_stderr,none": 0.009571764897113625,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.42971887550200805,
						"acc_stderr,none": 0.00992257215360778,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.00944890091461762,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3497991967871486,
						"acc_stderr,none": 0.00955918147477829,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721304,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.342570281124498,
						"acc_stderr,none": 0.009512333319470373,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3690763052208835,
						"acc_stderr,none": 0.009672395644470429,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.378714859437751,
						"acc_stderr,none": 0.009722751990000575,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5640454846278804,
						"acc_stderr,none": 0.06251268358839181,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.48643282594308407,
						"acc_stderr,none": 0.012862387586650073,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7961614824619457,
						"acc_stderr,none": 0.010367050974022214,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6485771012574454,
						"acc_stderr,none": 0.012285910871738333,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5056254136333554,
						"acc_stderr,none": 0.012866310923072518,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5036399735274653,
						"acc_stderr,none": 0.01286678434828923,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5499669093315684,
						"acc_stderr,none": 0.012802713598219839,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4824619457313038,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5777630708140304,
						"acc_stderr,none": 0.012710555263676445,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163341,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5373924553275976,
						"acc_stderr,none": 0.01283109334701656,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.614824619457313,
						"acc_stderr,none": 0.012523231571141193,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7446617217352214,
						"acc_stderr,none": 0.07732550176464906,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8554838709677419,
						"acc_stderr,none": 0.007293668342043698,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6746987951807228,
						"acc_stderr,none": 0.05173576521112386,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5766423357664233,
						"acc_stderr,none": 0.015963356799273146,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6768060836501901,
						"acc_stderr,none": 0.028894359362917902,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6158730158730159,
						"acc_stderr,none": 0.027448471944317758,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6805555555555556,
						"acc_stderr,none": 0.020789568197560084,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "mosaicml/mpt-7b-chat"
	},
	"mosaicml/mpt-7b-instruct": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6513528748590756,
						"acc_norm,none": 0.6313416009019166,
						"acc_norm_stderr,none": 0.04305951224838435,
						"acc_stderr,none": 0.05366924245322849,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3459375,
						"acc_stderr,none": 0.015139559437046077,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0622,
						"acc_stderr,none": 0.07194250704060459,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8297462686567164,
						"acc_stderr,none": 0.1493143646824782,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2652303120356613,
						"acc_norm,none": 0.2652303120356613,
						"acc_norm_stderr,none": 0.11655469671100141,
						"acc_stderr,none": 0.11655469671100141,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.27024693489898133,
						"acc_norm,none": 0.27024693489898133,
						"acc_norm_stderr,none": 0.04298243803883009,
						"acc_stderr,none": 0.04298243803883009,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.036635360763268,
						"likelihood_diff_stderr,none": 0.4901353373352784,
						"pct_stereotype,none": 0.5803518187239117,
						"pct_stereotype_stderr,none": 0.08271671870695962
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.050688976377952756,
						"exact_match_stderr,none": 0.004867501128272265
					},
					"glue": {
						"acc,none": 0.479206309148114,
						"acc_stderr,none": 0.054810303964793076,
						"alias": "glue",
						"f1,none": 0.4717161946832843,
						"f1_stderr,none": 0.0011105112327696215,
						"mcc,none": -0.007714209864949741,
						"mcc_stderr,none": 0.0009498954058808936
					},
					"kmmlu": {
						"acc,none": 0.20239676580999133,
						"acc_norm,none": 0.20239676580999133,
						"acc_norm_stderr,none": 0.027636493161657106,
						"acc_stderr,none": 0.027636493161657106,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5036176277132208,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.041210441738356404,
						"alias": "kobest",
						"f1,none": 0.4133437594436588,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6535998447506307,
						"acc_stderr,none": 0.014074500863608893,
						"alias": "lambada",
						"perplexity,none": 4.126279971628858,
						"perplexity_stderr,none": 0.25584729597230355
					},
					"lambada_cloze": {
						"acc,none": 0.31651465165922765,
						"acc_stderr,none": 0.06251237921593697,
						"alias": "lambada_cloze",
						"perplexity,none": 42.093317573255824,
						"perplexity_stderr,none": 11.643775612799784
					},
					"lambada_multilingual": {
						"acc,none": 0.4358237919658451,
						"acc_stderr,none": 0.07212349125264439,
						"alias": "lambada_multilingual",
						"perplexity,none": 56.74911050585341,
						"perplexity_stderr,none": 18.34058806043957
					},
					"mmlu": {
						"acc,none": 0.33371314627545934,
						"acc_stderr,none": 0.050657906692161726,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3128586609989373,
						"acc_stderr,none": 0.04127974123644089,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3656260057933698,
						"acc_stderr,none": 0.048435890587305255,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36139096522586933,
						"acc_stderr,none": 0.048509082153170346,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.3063748810656518,
						"acc_stderr,none": 0.0527897662798877,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.3274662881476224,
						"acc_norm,none": 0.285780658046265,
						"acc_norm_stderr,none": 0.00012215371299257569,
						"acc_stderr,none": 0.10854710136870047,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.42478571428571427,
						"acc_stderr,none": 0.056278187344401,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7354546405044216,
						"acc_norm,none": 0.6355225951200462,
						"acc_norm_stderr,none": 0.00417579266980185,
						"acc_stderr,none": 0.14075002403470363,
						"alias": "pythia",
						"bits_per_byte,none": 0.6448255664376533,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5635502288907681,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.6551778507970356,
						"perplexity_stderr,none": 0.08614789189662851,
						"word_perplexity,none": 10.914490553993597,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4308510638297872,
						"acc_norm,none": 0.4734042553191489,
						"acc_norm_stderr,none": 0.04792229120706373,
						"acc_stderr,none": 0.039126933775720726,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.6888289907157832,
						"acc_stderr,none": 0.05356764476381529,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3103004502815094,
						"acc_stderr,none": 0.04143104790883736,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.0002590774867436335,
						"bleu_diff,none": -5.670405867003985,
						"bleu_diff_stderr,none": 0.39849578962290877,
						"bleu_max,none": 18.104395474342578,
						"bleu_max_stderr,none": 0.5328426786199876,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.0002560848546259371,
						"rouge1_diff,none": -7.709133785247639,
						"rouge1_diff_stderr,none": 0.4709516591920785,
						"rouge1_max,none": 38.430354870153614,
						"rouge1_max_stderr,none": 0.9162227976556272,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.00021466168539929287,
						"rouge2_diff,none": -8.748756247459179,
						"rouge2_diff_stderr,none": 0.6146014745962853,
						"rouge2_max,none": 25.047562880936276,
						"rouge2_max_stderr,none": 0.8745852039294537,
						"rougeL_acc,none": 0.2864137086903305,
						"rougeL_acc_stderr,none": 0.0002504667845154174,
						"rougeL_diff,none": -8.036232250881685,
						"rougeL_diff_stderr,none": 0.4933861748879933,
						"rougeL_max,none": 36.011171090048414,
						"rougeL_max_stderr,none": 0.8814889440758711
					},
					"xcopa": {
						"acc,none": 0.5370909090909091,
						"acc_stderr,none": 0.041918525545404726,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.394136546184739,
						"acc_stderr,none": 0.052367471170260985,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5674147163227242,
						"acc_stderr,none": 0.08216700222453983,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7680377612946729,
						"acc_stderr,none": 0.07631041097011208,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6513528748590756,
						"acc_norm,none": 0.6313416009019166,
						"acc_norm_stderr,none": 0.04305951224838435,
						"acc_stderr,none": 0.05366924245322849,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3459375,
						"acc_stderr,none": 0.015139559437046077,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.337,
						"acc_stderr,none": 0.014955087918653614,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408947,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.013819249004047296,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.4249146757679181,
						"acc_norm,none": 0.4522184300341297,
						"acc_norm_stderr,none": 0.01454451988063383,
						"acc_stderr,none": 0.014445698968520769,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7630471380471381,
						"acc_norm,none": 0.7196969696969697,
						"acc_norm_stderr,none": 0.00921630686408803,
						"acc_stderr,none": 0.008725189261472277,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0622,
						"acc_stderr,none": 0.07194250704060459,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.014,
						"acc_stderr,none": 0.002627822811066808,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.194,
						"acc_stderr,none": 0.008844269927771193,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0655,
						"acc_stderr,none": 0.00553355085750055,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.2515,
						"acc_stderr,none": 0.009704172323296928,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.048,
						"acc_stderr,none": 0.004781153596660243,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.045,
						"acc_stderr,none": 0.0046366204421499965,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.0009992493430694993,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.00099924934306949,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.030802603036876357,
						"acc_stderr,none": 0.003599636042259183,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8297462686567164,
						"acc_stderr,none": 0.1493143646824782,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704157,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437603,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096928,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.834,
						"acc_stderr,none": 0.011772110370812185,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.747,
						"acc_stderr,none": 0.01375427861358708,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.01558903518560463,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737228,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.867,
						"acc_stderr,none": 0.010743669132397325,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.989,
						"acc_stderr,none": 0.003299983316607816,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410038,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792938,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315169,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706819,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139978,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592074,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.971,
						"acc_stderr,none": 0.005309160685756975,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.009406619184621264,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.739,
						"acc_stderr,none": 0.013895037677965134,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614751,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.011657267771304434,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295436,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.011358918303475282,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.0038457495745030067,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.457,
						"acc_stderr,none": 0.01576069159013638,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525061,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843981,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.659,
						"acc_stderr,none": 0.014998131348402706,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938572,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410045,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.862,
						"acc_stderr,none": 0.010912152632504397,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.00914437639315112,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695792,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.015803979428161957,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.009276910103103296,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.767,
						"acc_stderr,none": 0.013374972519220072,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.561,
						"acc_stderr,none": 0.015701131345400767,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.669,
						"acc_stderr,none": 0.014888272588203938,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499669,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724435,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653885,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340992,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696228,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.008018934050315148,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.0019969947390987295,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.864,
						"acc_stderr,none": 0.01084535023047299,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.737,
						"acc_stderr,none": 0.013929286594259734,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.015813097547730987,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706807,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.917,
						"acc_stderr,none": 0.008728527206074787,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426683,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.784,
						"acc_stderr,none": 0.013019735539307855,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.015816135752773203,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724413,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462124,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.01456664639466439,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098706,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.844,
						"acc_stderr,none": 0.011480235006122367,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.01064016979249935,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.936,
						"acc_stderr,none": 0.007743640226919289,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333375,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.0052195060344100395,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275289,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.014356395999905687,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.279,
						"acc_stderr,none": 0.014190150117612025,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7262996941896025,
						"acc_stderr,none": 0.0077980876386284275,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.48214285714285715,
						"acc_stderr,none": 0.06737697508644648,
						"alias": "cb",
						"f1,none": 0.28777777777777774,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2652303120356613,
						"acc_norm,none": 0.2652303120356613,
						"acc_norm_stderr,none": 0.11655469671100141,
						"acc_stderr,none": 0.11655469671100141,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966264,
						"acc_stderr,none": 0.06520506636966264,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3404255319148936,
						"acc_norm,none": 0.3404255319148936,
						"acc_norm_stderr,none": 0.06986570800554745,
						"acc_stderr,none": 0.06986570800554745,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.059278386873217015,
						"acc_stderr,none": 0.059278386873217015,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5789473684210527,
						"acc_norm,none": 0.5789473684210527,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.11180339887498948,
						"acc_stderr,none": 0.11180339887498948,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.08446516354424752,
						"acc_stderr,none": 0.08446516354424752,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.08191780219091252,
						"acc_stderr,none": 0.08191780219091252,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.10154334054280735,
						"acc_stderr,none": 0.10154334054280735,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.11433239009500591,
						"acc_stderr,none": 0.11433239009500591,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.07655305550699534,
						"acc_stderr,none": 0.07655305550699534,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.08163265306122448,
						"acc_norm,none": 0.08163265306122448,
						"acc_norm_stderr,none": 0.03952023822660627,
						"acc_stderr,none": 0.03952023822660627,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.07335878043508444,
						"acc_stderr,none": 0.07335878043508444,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.06545849153992006,
						"acc_stderr,none": 0.06545849153992006,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.27024693489898133,
						"acc_norm,none": 0.27024693489898133,
						"acc_norm_stderr,none": 0.04298243803883009,
						"acc_stderr,none": 0.04298243803883009,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28402366863905326,
						"acc_norm,none": 0.28402366863905326,
						"acc_norm_stderr,none": 0.03479140427262331,
						"acc_stderr,none": 0.03479140427262331,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865142,
						"acc_stderr,none": 0.03462157845865142,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2787878787878788,
						"acc_norm,none": 0.2787878787878788,
						"acc_norm_stderr,none": 0.03501438706296781,
						"acc_stderr,none": 0.03501438706296781,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.28708133971291866,
						"acc_norm,none": 0.28708133971291866,
						"acc_norm_stderr,none": 0.0313682872148917,
						"acc_stderr,none": 0.0313682872148917,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.35877862595419846,
						"acc_norm,none": 0.35877862595419846,
						"acc_norm_stderr,none": 0.04206739313864908,
						"acc_stderr,none": 0.04206739313864908,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.27941176470588236,
						"acc_norm,none": 0.27941176470588236,
						"acc_norm_stderr,none": 0.03861882389311727,
						"acc_stderr,none": 0.03861882389311727,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.0426929191572811,
						"acc_stderr,none": 0.0426929191572811,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25696594427244585,
						"acc_norm,none": 0.25696594427244585,
						"acc_norm_stderr,none": 0.024350854676330122,
						"acc_stderr,none": 0.024350854676330122,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.030964517926923413,
						"acc_stderr,none": 0.030964517926923413,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2849162011173184,
						"acc_norm,none": 0.2849162011173184,
						"acc_norm_stderr,none": 0.03383195081328524,
						"acc_stderr,none": 0.03383195081328524,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.24050632911392406,
						"acc_norm,none": 0.24050632911392406,
						"acc_norm_stderr,none": 0.027820781981149675,
						"acc_stderr,none": 0.027820781981149675,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371222,
						"acc_stderr,none": 0.04198857662371222,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2803738317757009,
						"acc_norm,none": 0.2803738317757009,
						"acc_norm_stderr,none": 0.043628399335701,
						"acc_stderr,none": 0.043628399335701,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04186091791394607,
						"acc_stderr,none": 0.04186091791394607,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.043362909039199406,
						"acc_stderr,none": 0.043362909039199406,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.04555176317903526,
						"acc_stderr,none": 0.04555176317903526,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2271062271062271,
						"acc_norm,none": 0.2271062271062271,
						"acc_norm_stderr,none": 0.025403290424595153,
						"acc_stderr,none": 0.025403290424595153,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.27450980392156865,
						"acc_norm,none": 0.27450980392156865,
						"acc_norm_stderr,none": 0.031321798030832904,
						"acc_stderr,none": 0.031321798030832904,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2807017543859649,
						"acc_norm,none": 0.2807017543859649,
						"acc_norm_stderr,none": 0.034462962170884265,
						"acc_stderr,none": 0.034462962170884265,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.03814280082617516,
						"acc_stderr,none": 0.03814280082617516,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.302158273381295,
						"acc_norm,none": 0.302158273381295,
						"acc_norm_stderr,none": 0.03908914479291562,
						"acc_stderr,none": 0.03908914479291562,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.29559748427672955,
						"acc_norm,none": 0.29559748427672955,
						"acc_norm_stderr,none": 0.036302143777231344,
						"acc_stderr,none": 0.036302143777231344,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25766871165644173,
						"acc_norm,none": 0.25766871165644173,
						"acc_norm_stderr,none": 0.03436150827846917,
						"acc_stderr,none": 0.03436150827846917,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.26744186046511625,
						"acc_norm,none": 0.26744186046511625,
						"acc_norm_stderr,none": 0.033848364281578586,
						"acc_stderr,none": 0.033848364281578586,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.02747460833869742,
						"acc_stderr,none": 0.02747460833869742,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23737373737373738,
						"acc_norm,none": 0.23737373737373738,
						"acc_norm_stderr,none": 0.030313710538198892,
						"acc_stderr,none": 0.030313710538198892,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.3067226890756303,
						"acc_norm,none": 0.3067226890756303,
						"acc_norm_stderr,none": 0.02995382389188704,
						"acc_stderr,none": 0.02995382389188704,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23478260869565218,
						"acc_norm,none": 0.23478260869565218,
						"acc_norm_stderr,none": 0.028009647070930125,
						"acc_stderr,none": 0.028009647070930125,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.26573426573426573,
						"acc_norm,none": 0.26573426573426573,
						"acc_norm_stderr,none": 0.03706860462623558,
						"acc_stderr,none": 0.03706860462623558,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23295454545454544,
						"acc_norm,none": 0.23295454545454544,
						"acc_norm_stderr,none": 0.031954139030501774,
						"acc_stderr,none": 0.031954139030501774,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2751677852348993,
						"acc_norm,none": 0.2751677852348993,
						"acc_norm_stderr,none": 0.03671019403342561,
						"acc_stderr,none": 0.03671019403342561,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.2878787878787879,
						"acc_norm,none": 0.2878787878787879,
						"acc_norm_stderr,none": 0.039559076642353884,
						"acc_stderr,none": 0.039559076642353884,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.288135593220339,
						"acc_norm,none": 0.288135593220339,
						"acc_norm_stderr,none": 0.04187011593049808,
						"acc_stderr,none": 0.04187011593049808,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2073170731707317,
						"acc_norm,none": 0.2073170731707317,
						"acc_norm_stderr,none": 0.031752175360736774,
						"acc_stderr,none": 0.031752175360736774,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.035725021418155686,
						"acc_stderr,none": 0.035725021418155686,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.29365079365079366,
						"acc_norm,none": 0.29365079365079366,
						"acc_norm_stderr,none": 0.04073524322147127,
						"acc_stderr,none": 0.04073524322147127,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.31351351351351353,
						"acc_norm,none": 0.31351351351351353,
						"acc_norm_stderr,none": 0.034200717507564114,
						"acc_stderr,none": 0.034200717507564114,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.29927007299270075,
						"acc_norm,none": 0.29927007299270075,
						"acc_norm_stderr,none": 0.022615961145736815,
						"acc_stderr,none": 0.022615961145736815,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.031902690392193345,
						"acc_stderr,none": 0.031902690392193345,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.04075944659069251,
						"acc_stderr,none": 0.04075944659069251,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.22380952380952382,
						"acc_norm,none": 0.22380952380952382,
						"acc_norm_stderr,none": 0.028830375135239766,
						"acc_stderr,none": 0.028830375135239766,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3055555555555556,
						"acc_norm,none": 0.3055555555555556,
						"acc_norm_stderr,none": 0.03443002441392582,
						"acc_stderr,none": 0.03443002441392582,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.291005291005291,
						"acc_norm,none": 0.291005291005291,
						"acc_norm_stderr,none": 0.0331278320035657,
						"acc_stderr,none": 0.0331278320035657,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.037245636197746325,
						"acc_stderr,none": 0.037245636197746325,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604902,
						"acc_stderr,none": 0.04176466758604902,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.032377088536015224,
						"acc_stderr,none": 0.032377088536015224,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.2559241706161137,
						"acc_norm,none": 0.2559241706161137,
						"acc_norm_stderr,none": 0.03011304016776726,
						"acc_stderr,none": 0.03011304016776726,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26861702127659576,
						"acc_norm,none": 0.26861702127659576,
						"acc_norm_stderr,none": 0.022888827968077067,
						"acc_stderr,none": 0.022888827968077067,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.029406995359394585,
						"acc_stderr,none": 0.029406995359394585,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543968,
						"acc_stderr,none": 0.03279424038543968,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.31851851851851853,
						"acc_norm,none": 0.31851851851851853,
						"acc_norm_stderr,none": 0.040247784019771096,
						"acc_stderr,none": 0.040247784019771096,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.23008849557522124,
						"acc_norm,none": 0.23008849557522124,
						"acc_norm_stderr,none": 0.028059284839160172,
						"acc_stderr,none": 0.028059284839160172,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.03401506715249039,
						"acc_stderr,none": 0.03401506715249039,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617758,
						"acc_stderr,none": 0.03231470996617758,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.03560846537586734,
						"acc_stderr,none": 0.03560846537586734,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.28125,
						"acc_norm_stderr,none": 0.03565632932250201,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.007714209864949741,
						"mcc_stderr,none": 0.03082037322747558
					},
					"copa": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.03144660377352202,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 4.036635360763268,
						"likelihood_diff_stderr,none": 0.4901353373352784,
						"pct_stereotype,none": 0.5803518187239117,
						"pct_stereotype_stderr,none": 0.08271671870695962
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.961016696481813,
						"likelihood_diff_stderr,none": 0.09186487690776848,
						"pct_stereotype,none": 0.655933214072749,
						"pct_stereotype_stderr,none": 0.011604172587877416
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.59478021978022,
						"likelihood_diff_stderr,none": 0.4192199273038486,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.047052133987784364
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.556818181818182,
						"likelihood_diff_stderr,none": 1.4700962047565695,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.15212000482437738
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.838461538461538,
						"likelihood_diff_stderr,none": 0.6812749371148393,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.887109375,
						"likelihood_diff_stderr,none": 0.17876481571466685,
						"pct_stereotype,none": 0.64375,
						"pct_stereotype_stderr,none": 0.026812710310024225
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.868634259259259,
						"likelihood_diff_stderr,none": 0.2363810679911115,
						"pct_stereotype,none": 0.6157407407407407,
						"pct_stereotype_stderr,none": 0.03317354514310742
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.444444444444445,
						"likelihood_diff_stderr,none": 0.36083450643798126,
						"pct_stereotype,none": 0.7638888888888888,
						"pct_stereotype_stderr,none": 0.050401578099733044
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.640748031496063,
						"likelihood_diff_stderr,none": 0.14945773321224962,
						"pct_stereotype,none": 0.5787401574803149,
						"pct_stereotype_stderr,none": 0.021928698676414303
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.024774774774775,
						"likelihood_diff_stderr,none": 0.3773263217713607,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.041887708614323955
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 5.024193548387097,
						"likelihood_diff_stderr,none": 0.5094409427601696,
						"pct_stereotype,none": 0.7956989247311828,
						"pct_stereotype_stderr,none": 0.04203545939892302
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.543421052631579,
						"likelihood_diff_stderr,none": 0.2650026022116414,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 4.125819916517591,
						"likelihood_diff_stderr,none": 0.09453741066655885,
						"pct_stereotype,none": 0.5080500894454383,
						"pct_stereotype_stderr,none": 0.01221171617623539
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.9583333333333335,
						"likelihood_diff_stderr,none": 0.3556986583901103,
						"pct_stereotype,none": 0.4444444444444444,
						"pct_stereotype_stderr,none": 0.052671718126664185
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.211538461538462,
						"likelihood_diff_stderr,none": 1.1759632419217385,
						"pct_stereotype,none": 0.46153846153846156,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.462121212121212,
						"likelihood_diff_stderr,none": 0.5372283404202104,
						"pct_stereotype,none": 0.5606060606060606,
						"pct_stereotype_stderr,none": 0.06156009014560979
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.8399532710280373,
						"likelihood_diff_stderr,none": 0.20680223728381336,
						"pct_stereotype,none": 0.514018691588785,
						"pct_stereotype_stderr,none": 0.02793986154930238
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.426383399209486,
						"likelihood_diff_stderr,none": 0.2570695871968478,
						"pct_stereotype,none": 0.3241106719367589,
						"pct_stereotype_stderr,none": 0.02948384978103373
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.005208333333333,
						"likelihood_diff_stderr,none": 0.5057162818439075,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.5521739130434784,
						"likelihood_diff_stderr,none": 0.16646759674417352,
						"pct_stereotype,none": 0.49782608695652175,
						"pct_stereotype_stderr,none": 0.023337780813399874
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 4.208695652173913,
						"likelihood_diff_stderr,none": 0.31885405803284694,
						"pct_stereotype,none": 0.5652173913043478,
						"pct_stereotype_stderr,none": 0.046429222863564275
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 5.587912087912088,
						"likelihood_diff_stderr,none": 0.4724939292975527,
						"pct_stereotype,none": 0.7582417582417582,
						"pct_stereotype_stderr,none": 0.04513082148355002
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.378826530612245,
						"likelihood_diff_stderr,none": 0.2820998311933945,
						"pct_stereotype,none": 0.5867346938775511,
						"pct_stereotype_stderr,none": 0.03526290219436086
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.050688976377952756,
						"exact_match_stderr,none": 0.004867501128272265
					},
					"glue": {
						"acc,none": 0.479206309148114,
						"acc_stderr,none": 0.054810303964793076,
						"alias": "glue",
						"f1,none": 0.4717161946832843,
						"f1_stderr,none": 0.0011105112327696215,
						"mcc,none": -0.007714209864949741,
						"mcc_stderr,none": 0.0009498954058808936
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.06595905989385899,
						"exact_match_stderr,get-answer": 0.006836951192034179
					},
					"hellaswag": {
						"acc,none": 0.5811591316470822,
						"acc_norm,none": 0.770762796255726,
						"acc_norm_stderr,none": 0.004194830716126044,
						"acc_stderr,none": 0.0049236092078615325,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.20239676580999133,
						"acc_norm,none": 0.20239676580999133,
						"acc_norm_stderr,none": 0.027636493161657106,
						"acc_stderr,none": 0.027636493161657106,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.012411851354816322,
						"acc_stderr,none": 0.012411851354816322,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.203,
						"acc_norm,none": 0.203,
						"acc_norm_stderr,none": 0.01272607374459826,
						"acc_stderr,none": 0.01272607374459826,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.233,
						"acc_norm,none": 0.233,
						"acc_norm_stderr,none": 0.013374972519220084,
						"acc_stderr,none": 0.013374972519220084,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.013663187134877628,
						"acc_stderr,none": 0.013663187134877628,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.24166666666666667,
						"acc_norm,none": 0.24166666666666667,
						"acc_norm_stderr,none": 0.0174914187827403,
						"acc_stderr,none": 0.0174914187827403,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707387,
						"acc_stderr,none": 0.012559527926707387,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.01233625482807411,
						"acc_stderr,none": 0.01233625482807411,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.188,
						"acc_norm,none": 0.188,
						"acc_norm_stderr,none": 0.012361586015103758,
						"acc_stderr,none": 0.012361586015103758,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.029601626330440625,
						"acc_stderr,none": 0.029601626330440625,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.162,
						"acc_norm,none": 0.162,
						"acc_norm_stderr,none": 0.011657267771304401,
						"acc_stderr,none": 0.011657267771304401,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3230769230769231,
						"acc_norm,none": 0.3230769230769231,
						"acc_norm_stderr,none": 0.04117444688605598,
						"acc_stderr,none": 0.04117444688605598,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.012233587399477823,
						"acc_stderr,none": 0.012233587399477823,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096925,
						"acc_stderr,none": 0.012841374572096925,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281569,
						"acc_stderr,none": 0.013354937452281569,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.184,
						"acc_norm,none": 0.184,
						"acc_norm_stderr,none": 0.012259457340938572,
						"acc_stderr,none": 0.012259457340938572,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.179,
						"acc_norm,none": 0.179,
						"acc_norm_stderr,none": 0.012128730605719116,
						"acc_stderr,none": 0.012128730605719116,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.169,
						"acc_norm,none": 0.169,
						"acc_norm_stderr,none": 0.011856625977890119,
						"acc_stderr,none": 0.011856625977890119,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707389,
						"acc_stderr,none": 0.012559527926707389,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499354,
						"acc_stderr,none": 0.012864077288499354,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.042923469599092816,
						"acc_stderr,none": 0.042923469599092816,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.01171500069318133,
						"acc_stderr,none": 0.01171500069318133,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938041,
						"acc_stderr,none": 0.012931481864938041,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.012233587399477826,
						"acc_stderr,none": 0.012233587399477826,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.224,
						"acc_norm,none": 0.224,
						"acc_norm_stderr,none": 0.013190830072364455,
						"acc_stderr,none": 0.013190830072364455,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264607,
						"acc_stderr,none": 0.011912216456264607,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595282,
						"acc_stderr,none": 0.013063179040595282,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.01664216340116628,
						"acc_stderr,none": 0.01664216340116628,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644608,
						"acc_stderr,none": 0.011800434324644608,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.01233625482807411,
						"acc_stderr,none": 0.01233625482807411,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499334,
						"acc_stderr,none": 0.012864077288499334,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031815,
						"acc_stderr,none": 0.012997843819031815,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.0446196043338474,
						"acc_stderr,none": 0.0446196043338474,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22333333333333333,
						"acc_norm,none": 0.22333333333333333,
						"acc_norm_stderr,none": 0.024085657867318578,
						"acc_stderr,none": 0.024085657867318578,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177445,
						"acc_stderr,none": 0.013569640199177445,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.168,
						"acc_norm,none": 0.168,
						"acc_norm_stderr,none": 0.011828605831454248,
						"acc_stderr,none": 0.011828605831454248,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938022,
						"acc_stderr,none": 0.012931481864938022,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.02662790314934043,
						"acc_stderr,none": 0.02662790314934043,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.222,
						"acc_norm,none": 0.222,
						"acc_norm_stderr,none": 0.013148721948877364,
						"acc_stderr,none": 0.013148721948877364,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.234,
						"acc_norm,none": 0.234,
						"acc_norm_stderr,none": 0.013394902889660013,
						"acc_stderr,none": 0.013394902889660013,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.029601626330440615,
						"acc_stderr,none": 0.029601626330440615,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031825,
						"acc_stderr,none": 0.012997843819031825,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5036176277132208,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.0004947174348697385,
						"acc_stderr,none": 0.041210441738356404,
						"alias": "kobest",
						"f1,none": 0.4133437594436588,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5220797720797721,
						"acc_stderr,none": 0.013335745662578467,
						"alias": " - kobest_boolq",
						"f1,none": 0.3901607126327108,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.015773547629015103,
						"alias": " - kobest_copa",
						"f1,none": 0.5369330938482263,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.364,
						"acc_norm,none": 0.444,
						"acc_norm_stderr,none": 0.022242244375731027,
						"acc_stderr,none": 0.021539170637317702,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.35900217794627737,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5768261964735516,
						"acc_stderr,none": 0.024827573845811274,
						"alias": " - kobest_sentineg",
						"f1,none": 0.523327615780446,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6535998447506307,
						"acc_stderr,none": 0.014074500863608893,
						"alias": "lambada",
						"perplexity,none": 4.126279971628858,
						"perplexity_stderr,none": 0.25584729597230355
					},
					"lambada_cloze": {
						"acc,none": 0.31651465165922765,
						"acc_stderr,none": 0.06251237921593697,
						"alias": "lambada_cloze",
						"perplexity,none": 42.093317573255824,
						"perplexity_stderr,none": 11.643775612799784
					},
					"lambada_multilingual": {
						"acc,none": 0.4358237919658451,
						"acc_stderr,none": 0.07212349125264439,
						"alias": "lambada_multilingual",
						"perplexity,none": 56.74911050585341,
						"perplexity_stderr,none": 18.34058806043957
					},
					"lambada_openai": {
						"acc,none": 0.6768872501455463,
						"acc_stderr,none": 0.00651549307324997,
						"alias": " - lambada_openai",
						"perplexity,none": 3.6551778507970356,
						"perplexity_stderr,none": 0.08614789189662851
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.4409082088104017,
						"acc_stderr,none": 0.0069171584329752915,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 18.985070957666377,
						"perplexity_stderr,none": 0.5489562379016688
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.32078400931496215,
						"acc_stderr,none": 0.006503129154270499,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 89.58954108017518,
						"perplexity_stderr,none": 5.7043078106079435
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6776634969920434,
						"acc_stderr,none": 0.00651139233729141,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.6516840908396158,
						"perplexity_stderr,none": 0.08625775395434616
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3518338831748496,
						"acc_stderr,none": 0.0066531002239743516,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 78.16952339551807,
						"perplexity_stderr,none": 4.618402738323029
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4356685425965457,
						"acc_stderr,none": 0.006908079137757327,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 43.25025116719301,
						"perplexity_stderr,none": 2.5474909235310292
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.39316902775082474,
						"acc_stderr,none": 0.006805116923096298,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 69.08455279554116,
						"perplexity_stderr,none": 4.386909289844354
					},
					"lambada_standard": {
						"acc,none": 0.6287599456627208,
						"acc_stderr,none": 0.0067310361108789294,
						"alias": " - lambada_standard",
						"perplexity,none": 4.596802920724559,
						"perplexity_stderr,none": 0.11300339014750194
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.19212109450805356,
						"acc_stderr,none": 0.0054887409382452035,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 65.20156418884527,
						"perplexity_stderr,none": 1.9577101705221365
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.272264631043257,
						"exact_match_stderr,get-answer": 0.011230375109327478
					},
					"logiqa": {
						"acc,none": 0.2565284178187404,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.017512971782225207,
						"acc_stderr,none": 0.01712944332788756,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2652671755725191,
						"acc_norm,none": 0.294529262086514,
						"acc_norm_stderr,none": 0.011500471190116972,
						"acc_stderr,none": 0.011138286518433165,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.28509212730318256,
						"acc_norm,none": 0.27671691792294806,
						"acc_norm_stderr,none": 0.008189786871508207,
						"acc_stderr,none": 0.008264531564961743,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5781614064816776,
						"acc_stderr,none": 0.005082632456610941,
						"alias": "mc_taco",
						"f1,none": 0.5346418974179227,
						"f1_stderr,none": 0.006537459182918326
					},
					"medmcqa": {
						"acc,none": 0.29691608893138893,
						"acc_norm,none": 0.29691608893138893,
						"acc_norm_stderr,none": 0.007065264602860563,
						"acc_stderr,none": 0.007065264602860563,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2608012568735271,
						"acc_norm,none": 0.2608012568735271,
						"acc_norm_stderr,none": 0.01231095926370882,
						"acc_stderr,none": 0.01231095926370882,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.33371314627545934,
						"acc_stderr,none": 0.050657906692161726,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.32592592592592595,
						"acc_stderr,none": 0.040491220417025055,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3355263157894737,
						"acc_stderr,none": 0.03842498559395268,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3433962264150943,
						"acc_stderr,none": 0.02922452646912479,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.03852084696008534,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.28901734104046245,
						"acc_stderr,none": 0.034564257450869995,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237655,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3148936170212766,
						"acc_stderr,none": 0.030363582197238174,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.30701754385964913,
						"acc_stderr,none": 0.0433913832257986,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.3448275862068966,
						"acc_stderr,none": 0.03960933549451208,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.29365079365079366,
						"acc_stderr,none": 0.02345603738398203,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.25396825396825395,
						"acc_stderr,none": 0.03893259610604673,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.048523658709391,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3709677419354839,
						"acc_stderr,none": 0.02748054188795359,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.270935960591133,
						"acc_stderr,none": 0.03127090713297698,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.3212121212121212,
						"acc_stderr,none": 0.036462049632538115,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.40404040404040403,
						"acc_stderr,none": 0.034961309720561266,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.44041450777202074,
						"acc_stderr,none": 0.03582724530036094,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3282051282051282,
						"acc_stderr,none": 0.023807633198657262,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3697478991596639,
						"acc_stderr,none": 0.03135709599613591,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.31125827814569534,
						"acc_stderr,none": 0.03780445850526732,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3302752293577982,
						"acc_stderr,none": 0.02016446633634298,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.031141447823536048,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.033540924375915195,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3291139240506329,
						"acc_stderr,none": 0.03058732629470236,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.4125560538116592,
						"acc_stderr,none": 0.03304062175449297,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.37404580152671757,
						"acc_stderr,none": 0.04243869242230524,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3128586609989373,
						"acc_stderr,none": 0.04127974123644089,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.35537190082644626,
						"acc_stderr,none": 0.04369236326573981,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.37037037037037035,
						"acc_stderr,none": 0.04668408033024931,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.26380368098159507,
						"acc_stderr,none": 0.03462419931615623,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04595091388086298,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.30097087378640774,
						"acc_stderr,none": 0.04541609446503948,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.405982905982906,
						"acc_stderr,none": 0.03217180182641086,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939099,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4278416347381865,
						"acc_stderr,none": 0.017692787927803728,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.025305258131879706,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2636871508379888,
						"acc_stderr,none": 0.014736926383761992,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3202614379084967,
						"acc_stderr,none": 0.026716118380156847,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3656260057933698,
						"acc_stderr,none": 0.048435890587305255,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.36012861736334406,
						"acc_stderr,none": 0.02726429759980401,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3487654320987654,
						"acc_stderr,none": 0.02651759772446501,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.3120567375886525,
						"acc_stderr,none": 0.02764012054516992,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.30182529335071706,
						"acc_stderr,none": 0.011724350518105886,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.25735294117647056,
						"acc_stderr,none": 0.026556519470041513,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.31862745098039214,
						"acc_stderr,none": 0.018850084696468702,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.4204081632653061,
						"acc_stderr,none": 0.03160106993449604,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.36139096522586933,
						"acc_stderr,none": 0.048509082153170346,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.39303482587064675,
						"acc_stderr,none": 0.0345368246603156,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.3063748810656518,
						"acc_stderr,none": 0.0527897662798877,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.46,
						"acc_stderr,none": 0.05009082659620333,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.03858158940685516,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4269005847953216,
						"acc_stderr,none": 0.03793620616529917,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4214977075904228,
						"acc_stderr,none": 0.004984563394336747,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4268714401952807,
						"acc_stderr,none": 0.004988566051096133,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6813725490196079,
						"acc_stderr,none": 0.02309599657184148,
						"alias": "mrpc",
						"f1,none": 0.8104956268221575,
						"f1_stderr,none": 0.016296862947682258
					},
					"multimedqa": {
						"acc,none": 0.3274662881476224,
						"acc_norm,none": 0.285780658046265,
						"acc_norm_stderr,none": 0.00012215371299257569,
						"acc_stderr,none": 0.10854710136870047,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7202784063517375,
						"mrr_stderr,none": 0.010261440219006313,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4051918735891648,
						"r@2_stderr,none": 0.01650240246733025
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6700526728856107,
						"mrr_stderr,none": 0.01056977924241828,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4435665914221219,
						"r@2_stderr,none": 0.01669991949628019
					},
					"openbookqa": {
						"acc,none": 0.32,
						"acc_norm,none": 0.428,
						"acc_norm_stderr,none": 0.022149790663861926,
						"acc_stderr,none": 0.020882340488761808,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.010909147755547945,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.301,
						"acc_stderr,none": 0.010259245881790259,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.010624897374662594,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.4755,
						"acc_stderr,none": 0.011169702598013186,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4915,
						"acc_stderr,none": 0.011181519941139164,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4715,
						"acc_stderr,none": 0.011164954236428807,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.011183136021064612,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.42478571428571427,
						"acc_stderr,none": 0.056278187344401,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7899891186071817,
						"acc_norm,none": 0.8046789989118607,
						"acc_norm_stderr,none": 0.009249776222397582,
						"acc_stderr,none": 0.009503353305818562,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.29766225448334754,
						"acc_norm,none": 0.3200789923142613,
						"acc_norm_stderr,none": 0.00340824625974014,
						"acc_stderr,none": 0.0033404735647815276,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.019536923574747615,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7354546405044216,
						"acc_norm,none": 0.6355225951200462,
						"acc_norm_stderr,none": 0.00417579266980185,
						"acc_stderr,none": 0.14075002403470363,
						"alias": "pythia",
						"bits_per_byte,none": 0.6448255664376533,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5635502288907681,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.6551778507970356,
						"perplexity_stderr,none": 0.08614789189662851,
						"word_perplexity,none": 10.914490553993597,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4308510638297872,
						"acc_norm,none": 0.4734042553191489,
						"acc_norm_stderr,none": 0.04792229120706373,
						"acc_stderr,none": 0.039126933775720726,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.45625,
						"acc_norm,none": 0.50625,
						"acc_norm_stderr,none": 0.03964948130713095,
						"acc_stderr,none": 0.039500492593059405,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4014084507042254,
						"acc_norm,none": 0.4154929577464789,
						"acc_norm_stderr,none": 0.029294324623678564,
						"acc_stderr,none": 0.02913837502274766,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.49679663188724144,
						"acc_stderr,none": 0.006765271702920652,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4926787039327232,
						"acc_stderr,none": 0.0024864340362504506,
						"alias": "qqp",
						"f1,none": 0.4690258613994667,
						"f1_stderr,none": 0.0031396355425752556
					},
					"race": {
						"acc,none": 0.41244019138755983,
						"acc_stderr,none": 0.015235484892818531,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.6823104693140795,
						"acc_stderr,none": 0.02802450356245461,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.95,
						"acc_norm,none": 0.919,
						"acc_norm_stderr,none": 0.00863212103213995,
						"acc_stderr,none": 0.006895472974897906,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6787003610108303,
						"acc_stderr,none": 0.02810862605328869,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8623853211009175,
						"acc_stderr,none": 0.011672771413875132,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.573627911626512,
						"acc_norm,none": 0.7614715585324403,
						"acc_norm_stderr,none": 0.003013197863332252,
						"acc_stderr,none": 0.0034965540263179763,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.6888289907157832,
						"acc_stderr,none": 0.05356764476381529,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.6535456730769231,
						"acc_stderr,none": 0.004762450021710565,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.830546265328874,
						"acc_stderr,none": 0.0037769125706723903,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5862745098039216,
						"acc_stderr,none": 0.004876720323501084,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3103004502815094,
						"acc_stderr,none": 0.04143104790883736,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.0002590774867436335,
						"bleu_diff,none": -5.670405867003985,
						"bleu_diff_stderr,none": 0.39849578962290877,
						"bleu_max,none": 18.104395474342578,
						"bleu_max_stderr,none": 0.5328426786199876,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.0002560848546259371,
						"rouge1_diff,none": -7.709133785247639,
						"rouge1_diff_stderr,none": 0.4709516591920785,
						"rouge1_max,none": 38.430354870153614,
						"rouge1_max_stderr,none": 0.9162227976556272,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.00021466168539929287,
						"rouge2_diff,none": -8.748756247459179,
						"rouge2_diff_stderr,none": 0.6146014745962853,
						"rouge2_max,none": 25.047562880936276,
						"rouge2_max_stderr,none": 0.8745852039294537,
						"rougeL_acc,none": 0.2864137086903305,
						"rougeL_acc_stderr,none": 0.0002504667845154174,
						"rougeL_diff,none": -8.036232250881685,
						"rougeL_diff_stderr,none": 0.4933861748879933,
						"rougeL_max,none": 36.011171090048414,
						"rougeL_max_stderr,none": 0.8814889440758711
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.30354957160342716,
						"bleu_acc_stderr,none": 0.01609588415538685,
						"bleu_diff,none": -5.670405867003985,
						"bleu_diff_stderr,none": 0.6312652292205779,
						"bleu_max,none": 18.104395474342578,
						"bleu_max_stderr,none": 0.7299607377249735,
						"rouge1_acc,none": 0.2974296205630355,
						"rouge1_acc_stderr,none": 0.016002651487360995,
						"rouge1_diff,none": -7.709133785247639,
						"rouge1_diff_stderr,none": 0.6862591778563537,
						"rouge1_max,none": 38.430354870153614,
						"rouge1_max_stderr,none": 0.9571952766576041,
						"rouge2_acc,none": 0.22643818849449204,
						"rouge2_acc_stderr,none": 0.014651337324602587,
						"rouge2_diff,none": -8.748756247459179,
						"rouge2_diff_stderr,none": 0.7839652253743691,
						"rouge2_max,none": 25.047562880936276,
						"rouge2_max_stderr,none": 0.9351926025848653,
						"rougeL_acc,none": 0.2864137086903305,
						"rougeL_acc_stderr,none": 0.01582614243950235,
						"rougeL_diff,none": -8.036232250881685,
						"rougeL_diff_stderr,none": 0.7024145320877077,
						"rougeL_max,none": 36.011171090048414,
						"rougeL_max_stderr,none": 0.9388764264139723
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22766217870257038,
						"acc_stderr,none": 0.014679255032111068,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3516195860709788,
						"acc_stderr,none": 0.013780188685317316,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.050688976377952756,
						"exact_match_stderr,none": 0.004867501128272265
					},
					"wic": {
						"acc,none": 0.4843260188087774,
						"acc_stderr,none": 0.019800984955347854,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6448255664376533,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5635502288907681,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.914490553993597,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6771902131018153,
						"acc_stderr,none": 0.01314049817335794,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4647887323943662,
						"acc_stderr,none": 0.0596130578497224,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8681318681318682,
						"acc_stderr,none": 0.020515321360773598,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5370909090909091,
						"acc_stderr,none": 0.041918525545404726,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.47,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360434,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.021893529941665817,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.02229623834840706,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.02229623834840705,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207878,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.02168382753928613,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.394136546184739,
						"acc_stderr,none": 0.052367471170260985,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3349397590361446,
						"acc_stderr,none": 0.009460223484996469,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3751004016064257,
						"acc_stderr,none": 0.009704349720814059,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.45903614457831327,
						"acc_stderr,none": 0.009988381409296447,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495736,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5506024096385542,
						"acc_stderr,none": 0.009970615649588139,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.44859437751004017,
						"acc_stderr,none": 0.009968964736894265,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4706827309236948,
						"acc_stderr,none": 0.01000483004554398,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.009620250217765995,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702091,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981022,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3606425702811245,
						"acc_stderr,none": 0.009624937202075311,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.378714859437751,
						"acc_stderr,none": 0.00972275199000058,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939167,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.35582329317269074,
						"acc_stderr,none": 0.009596375814335282,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.35943775100401604,
						"acc_stderr,none": 0.009617895762902742,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5674147163227242,
						"acc_stderr,none": 0.08216700222453983,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.49503639973527463,
						"acc_stderr,none": 0.012866491277589943,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7981469225678358,
						"acc_stderr,none": 0.010329293923393247,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.012151164438163905,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163343,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5089344804765056,
						"acc_stderr,none": 0.012865070917320809,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5605559232296492,
						"acc_stderr,none": 0.01277240869797914,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48378557246856385,
						"acc_stderr,none": 0.012860357805055874,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5896757114493713,
						"acc_stderr,none": 0.01265848580066339,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.49172733289212445,
						"acc_stderr,none": 0.012865364020375403,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5314361350099271,
						"acc_stderr,none": 0.012841668760976905,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6161482461945731,
						"acc_stderr,none": 0.012515145391728873,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7680377612946729,
						"acc_stderr,none": 0.07631041097011208,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8701075268817204,
						"acc_stderr,none": 0.006973653965627702,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6626506024096386,
						"acc_stderr,none": 0.052212602620321284,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5995828988529719,
						"acc_stderr,none": 0.01583062906365963,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6996197718631179,
						"acc_stderr,none": 0.028321487720855753,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6952380952380952,
						"acc_stderr,none": 0.025976599352305365,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7162698412698413,
						"acc_stderr,none": 0.020100510648841066,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "mosaicml/mpt-7b-instruct"
	},
	"rwkv-x-dev/v5-Eagle-7B-1_0T-HF": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6240135287485907,
						"acc_norm_stderr,none": 0.09153717414500881,
						"acc_stderr,none": 0.1074668322381248,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.016665300803532592,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.24505,
						"acc_stderr,none": 0.15252745892024416,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8386865671641791,
						"acc_stderr,none": 0.14955502218955172,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.29271916790490343,
						"acc_norm,none": 0.29271916790490343,
						"acc_norm_stderr,none": 0.12817382555486206,
						"acc_stderr,none": 0.12817382555486206,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.30322914867898465,
						"acc_norm,none": 0.30322914867898465,
						"acc_norm_stderr,none": 0.05717056223972768,
						"acc_stderr,none": 0.05717056223972768,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.407656902206321,
						"likelihood_diff_stderr,none": 0.5565249799052954,
						"pct_stereotype,none": 0.6234347048300537,
						"pct_stereotype_stderr,none": 0.06837641099061875
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.5761523344449738,
						"acc_stderr,none": 0.014028021819876564,
						"alias": "glue",
						"f1,none": 0.6882502219987833,
						"f1_stderr,none": 0.00021238381450874074,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"kmmlu": {
						"acc,none": 0.12705746462604678,
						"acc_norm,none": 0.12705746462604678,
						"acc_norm_stderr,none": 0.05366844030122934,
						"acc_stderr,none": 0.05366844030122934,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.5362859022144266,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.0446103990286613,
						"alias": "kobest",
						"f1,none": 0.45296706369731293,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7124005433727926,
						"acc_stderr,none": 0.01612482891631054,
						"alias": "lambada",
						"perplexity,none": 3.8058609547979567,
						"perplexity_stderr,none": 0.2287782595118224
					},
					"lambada_cloze": {
						"acc,none": 0.07413157384048127,
						"acc_stderr,none": 0.004277624730313147,
						"alias": "lambada_cloze",
						"perplexity,none": 375.2091205821181,
						"perplexity_stderr,none": 119.65724494959274
					},
					"lambada_multilingual": {
						"acc,none": 0.5369687560644284,
						"acc_stderr,none": 0.08450595721892018,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.001255003421633,
						"perplexity_stderr,none": 8.211457257577647
					},
					"mmlu": {
						"acc,none": 0.33086454920951425,
						"acc_stderr,none": 0.060605075693583886,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.32561105207226343,
						"acc_stderr,none": 0.0607809456258765,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3662697135500483,
						"acc_stderr,none": 0.049834862065929936,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.04972757753075276,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28227085315572464,
						"acc_stderr,none": 0.060616974885657864,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.35358410220014197,
						"acc_norm,none": 0.32068854393146246,
						"acc_norm_stderr,none": 0.00010226000674634883,
						"acc_stderr,none": 0.09857919062749855,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4554285714285714,
						"acc_stderr,none": 0.053940079204857536,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7433912692426552,
						"acc_norm,none": 0.6280221175210029,
						"acc_norm_stderr,none": 0.010330170441803736,
						"acc_stderr,none": 0.14350617224236548,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325467753027727,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502993074551348,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3763932503864287,
						"perplexity_stderr,none": 0.06622884189802866,
						"word_perplexity,none": 10.428882412474298,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.06166073934278518,
						"acc_stderr,none": 0.04155271878590972,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.647166483644471,
						"acc_stderr,none": 0.07235531218680337,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3318201004314263,
						"acc_stderr,none": 0.0016185678964646094,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.36107711138310894,
						"bleu_acc_stderr,none": 0.016814312844836882,
						"bleu_diff,none": -4.207307662674644,
						"bleu_diff_stderr,none": 0.9282874485829784,
						"bleu_max,none": 28.968049598440547,
						"bleu_max_stderr,none": 0.8184806122513136,
						"rouge1_acc,none": 0.3219094247246022,
						"rouge1_acc_stderr,none": 0.01635556761196041,
						"rouge1_diff,none": -5.379507228349093,
						"rouge1_diff_stderr,none": 1.0701075356835896,
						"rouge1_max,none": 54.33538941041346,
						"rouge1_max_stderr,none": 0.864656933048363,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.016002651487361,
						"rouge2_diff,none": -6.972999086815549,
						"rouge2_diff_stderr,none": 1.2603592214712704,
						"rouge2_max,none": 38.77260507597626,
						"rouge2_max_stderr,none": 1.050835368985665,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -5.365626722282412,
						"rougeL_diff_stderr,none": 1.0872530514178778,
						"rougeL_max,none": 51.79395244112797,
						"rougeL_max_stderr,none": 0.8898050908068873
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07216843330465124,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.44112449799196785,
						"acc_stderr,none": 0.0497121896277638,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.6328740749654053,
						"acc_stderr,none": 0.05996642434269435,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.035647216912070484,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.624859075535513,
						"acc_norm,none": 0.6240135287485907,
						"acc_norm_stderr,none": 0.09153717414500881,
						"acc_stderr,none": 0.1074668322381248,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.359375,
						"acc_stderr,none": 0.016665300803532592,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.376,
						"acc_stderr,none": 0.015325105508898134,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.348,
						"acc_stderr,none": 0.01507060460376841,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.355,
						"acc_stderr,none": 0.013819249004047301,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.39761092150170646,
						"acc_norm,none": 0.4308873720136519,
						"acc_norm_stderr,none": 0.014471133392642483,
						"acc_stderr,none": 0.014301752223279531,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7369528619528619,
						"acc_norm,none": 0.7192760942760943,
						"acc_norm_stderr,none": 0.00922052617471136,
						"acc_stderr,none": 0.00903451489886582,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.24505,
						"acc_stderr,none": 0.15252745892024416,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.009,
						"acc_stderr,none": 0.002112280962711347,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.4815,
						"acc_stderr,none": 0.01117547854278858,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.0565,
						"acc_stderr,none": 0.005164030267562486,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.5945,
						"acc_stderr,none": 0.010981583336946122,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.275,
						"acc_stderr,none": 0.009986859800975589,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.409,
						"acc_stderr,none": 0.010996361215419947,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.104,
						"acc_stderr,none": 0.006827540380973849,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.2995,
						"acc_stderr,none": 0.010244625477565744,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.095,
						"acc_stderr,none": 0.006558125075221689,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.1265,
						"acc_stderr,none": 0.00743481763438961,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0004338394793926247,
						"acc_stderr,none": 0.00043383947939263187,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8386865671641791,
						"acc_stderr,none": 0.14955502218955172,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745892,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.988,
						"acc_stderr,none": 0.003444977194099851,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000091,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495323,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592069,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.607,
						"acc_stderr,none": 0.015452824654081496,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.782,
						"acc_stderr,none": 0.01306317904059529,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042962,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469375,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565543,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734967,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165557,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803134,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178341,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704166,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.794,
						"acc_stderr,none": 0.012795613612786558,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.772,
						"acc_stderr,none": 0.013273740700804487,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.01241185135481633,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240625,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.011828605831454262,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.984,
						"acc_stderr,none": 0.003969856390319419,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.431,
						"acc_stderr,none": 0.015667944488173508,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113125,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.686,
						"acc_stderr,none": 0.014683991951087966,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.01126614068463216,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557425,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796394,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286427,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024952,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.626,
						"acc_stderr,none": 0.015308767369006363,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.015799513429996016,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.608,
						"acc_stderr,none": 0.015445859463771297,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.014632638658632895,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286415,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357793,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.898,
						"acc_stderr,none": 0.009575368801653886,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695804,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.012631649083099163,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380713,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000044,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796394,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319324,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.01578686875935901,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695435,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942312,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565595,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.731,
						"acc_stderr,none": 0.014029819522568196,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.474,
						"acc_stderr,none": 0.01579789775804276,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632163,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337051,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.015222868840522015,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499354,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412803,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.0111717862854965,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286408,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118581,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.0043194510829106135,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452372,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.405,
						"acc_stderr,none": 0.015531136990453045,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.015177264224798596,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6810397553516819,
						"acc_stderr,none": 0.00815167862952838,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.16071428571428573,
						"acc_stderr,none": 0.04952230059306299,
						"alias": "cb",
						"f1,none": 0.1572449642625081,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.29271916790490343,
						"acc_norm,none": 0.29271916790490343,
						"acc_norm_stderr,none": 0.12817382555486206,
						"acc_stderr,none": 0.12817382555486206,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.5151515151515151,
						"acc_norm,none": 0.5151515151515151,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122592,
						"acc_stderr,none": 0.08503766788122592,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2765957446808511,
						"acc_norm,none": 0.2765957446808511,
						"acc_norm_stderr,none": 0.0659529705144534,
						"acc_stderr,none": 0.0659529705144534,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.05443310539518174,
						"acc_stderr,none": 0.05443310539518174,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.3783783783783784,
						"acc_norm,none": 0.3783783783783784,
						"acc_norm_stderr,none": 0.08083044344561426,
						"acc_stderr,none": 0.08083044344561426,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.4827586206896552,
						"acc_norm,none": 0.4827586206896552,
						"acc_norm_stderr,none": 0.09443492370778725,
						"acc_stderr,none": 0.09443492370778725,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.0798889274021794,
						"acc_stderr,none": 0.0798889274021794,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894599,
						"acc_stderr,none": 0.10540925533894599,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.06206900541120632,
						"acc_stderr,none": 0.06206900541120632,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.5,
						"acc_norm,none": 0.5,
						"acc_norm_stderr,none": 0.07624928516630235,
						"acc_stderr,none": 0.07624928516630235,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.06859222936927092,
						"acc_stderr,none": 0.06859222936927092,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.30322914867898465,
						"acc_norm,none": 0.30322914867898465,
						"acc_norm_stderr,none": 0.05717056223972768,
						"acc_stderr,none": 0.05717056223972768,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.28994082840236685,
						"acc_norm,none": 0.28994082840236685,
						"acc_norm_stderr,none": 0.03500638924911012,
						"acc_stderr,none": 0.03500638924911012,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.3108108108108108,
						"acc_norm,none": 0.3108108108108108,
						"acc_norm_stderr,none": 0.03817320450441154,
						"acc_stderr,none": 0.03817320450441154,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.22560975609756098,
						"acc_norm,none": 0.22560975609756098,
						"acc_norm_stderr,none": 0.03273897454566342,
						"acc_stderr,none": 0.03273897454566342,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.4125,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.039040677866833816,
						"acc_stderr,none": 0.039040677866833816,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03477691162163659,
						"acc_stderr,none": 0.03477691162163659,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.35406698564593303,
						"acc_norm,none": 0.35406698564593303,
						"acc_norm_stderr,none": 0.0331592569829487,
						"acc_stderr,none": 0.0331592569829487,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.21875,
						"acc_norm,none": 0.21875,
						"acc_norm_stderr,none": 0.032784644885244255,
						"acc_stderr,none": 0.032784644885244255,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3511450381679389,
						"acc_norm,none": 0.3511450381679389,
						"acc_norm_stderr,none": 0.04186445163013751,
						"acc_stderr,none": 0.04186445163013751,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03976333292288874,
						"acc_stderr,none": 0.03976333292288874,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.33746130030959753,
						"acc_norm,none": 0.33746130030959753,
						"acc_norm_stderr,none": 0.02635054957781051,
						"acc_stderr,none": 0.02635054957781051,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3137254901960784,
						"acc_norm,none": 0.3137254901960784,
						"acc_norm_stderr,none": 0.03256685484460388,
						"acc_stderr,none": 0.03256685484460388,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.3407821229050279,
						"acc_norm,none": 0.3407821229050279,
						"acc_norm_stderr,none": 0.03552572003977929,
						"acc_stderr,none": 0.03552572003977929,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.270042194092827,
						"acc_norm,none": 0.270042194092827,
						"acc_norm_stderr,none": 0.028900721906293426,
						"acc_stderr,none": 0.028900721906293426,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.041988576623712234,
						"acc_stderr,none": 0.041988576623712234,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.04761979313593575,
						"acc_stderr,none": 0.04761979313593575,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3867924528301887,
						"acc_norm,none": 0.3867924528301887,
						"acc_norm_stderr,none": 0.04752784159123843,
						"acc_stderr,none": 0.04752784159123843,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.24074074074074073,
						"acc_norm,none": 0.24074074074074073,
						"acc_norm_stderr,none": 0.04133119440243838,
						"acc_stderr,none": 0.04133119440243838,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.04350546818999061,
						"acc_stderr,none": 0.04350546818999061,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03308611113236434,
						"acc_stderr,none": 0.03308611113236434,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2982456140350877,
						"acc_norm,none": 0.2982456140350877,
						"acc_norm_stderr,none": 0.035087719298245626,
						"acc_stderr,none": 0.035087719298245626,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2949640287769784,
						"acc_norm,none": 0.2949640287769784,
						"acc_norm_stderr,none": 0.03881956126735706,
						"acc_stderr,none": 0.03881956126735706,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.037676093121953455,
						"acc_stderr,none": 0.037676093121953455,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.32515337423312884,
						"acc_norm,none": 0.32515337423312884,
						"acc_norm_stderr,none": 0.036803503712864616,
						"acc_stderr,none": 0.036803503712864616,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.28488372093023256,
						"acc_norm,none": 0.28488372093023256,
						"acc_norm_stderr,none": 0.0345162887625062,
						"acc_stderr,none": 0.0345162887625062,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.028514456573421415,
						"acc_stderr,none": 0.028514456573421415,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2676767676767677,
						"acc_norm,none": 0.2676767676767677,
						"acc_norm_stderr,none": 0.031544498882702866,
						"acc_stderr,none": 0.031544498882702866,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.42016806722689076,
						"acc_norm,none": 0.42016806722689076,
						"acc_norm_stderr,none": 0.03206183783236152,
						"acc_stderr,none": 0.03206183783236152,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.23043478260869565,
						"acc_norm,none": 0.23043478260869565,
						"acc_norm_stderr,none": 0.027827807522276156,
						"acc_stderr,none": 0.027827807522276156,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.038201699145179055,
						"acc_stderr,none": 0.038201699145179055,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.32867132867132864,
						"acc_norm,none": 0.32867132867132864,
						"acc_norm_stderr,none": 0.0394188850126319,
						"acc_stderr,none": 0.0394188850126319,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2897727272727273,
						"acc_norm,none": 0.2897727272727273,
						"acc_norm_stderr,none": 0.03429323080239875,
						"acc_stderr,none": 0.03429323080239875,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.31543624161073824,
						"acc_norm,none": 0.31543624161073824,
						"acc_norm_stderr,none": 0.03819723167141383,
						"acc_stderr,none": 0.03819723167141383,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.3135593220338983,
						"acc_norm,none": 0.3135593220338983,
						"acc_norm_stderr,none": 0.042891223336625726,
						"acc_stderr,none": 0.042891223336625726,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.040693063197213754,
						"acc_stderr,none": 0.040693063197213754,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.3006993006993007,
						"acc_norm,none": 0.3006993006993007,
						"acc_norm_stderr,none": 0.03848167949490064,
						"acc_stderr,none": 0.03848167949490064,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.042163702135578345,
						"acc_stderr,none": 0.042163702135578345,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.36627906976744184,
						"acc_norm,none": 0.36627906976744184,
						"acc_norm_stderr,none": 0.036843172681015855,
						"acc_stderr,none": 0.036843172681015855,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.27007299270072993,
						"acc_norm,none": 0.27007299270072993,
						"acc_norm_stderr,none": 0.02192746197287115,
						"acc_stderr,none": 0.02192746197287115,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.40186915887850466,
						"acc_norm,none": 0.40186915887850466,
						"acc_norm_stderr,none": 0.03359314274571839,
						"acc_stderr,none": 0.03359314274571839,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2764227642276423,
						"acc_norm,none": 0.2764227642276423,
						"acc_norm_stderr,none": 0.040490154606224904,
						"acc_stderr,none": 0.040490154606224904,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.32786885245901637,
						"acc_norm,none": 0.32786885245901637,
						"acc_norm_stderr,none": 0.04267606874299955,
						"acc_stderr,none": 0.04267606874299955,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.03260773253630123,
						"acc_stderr,none": 0.03260773253630123,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.3111111111111111,
						"acc_norm,none": 0.3111111111111111,
						"acc_norm_stderr,none": 0.03460236918732731,
						"acc_stderr,none": 0.03460236918732731,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.3386243386243386,
						"acc_norm,none": 0.3386243386243386,
						"acc_norm_stderr,none": 0.03451471285997054,
						"acc_stderr,none": 0.03451471285997054,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.3103448275862069,
						"acc_norm,none": 0.3103448275862069,
						"acc_norm_stderr,none": 0.03855289616378948,
						"acc_stderr,none": 0.03855289616378948,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.37142857142857144,
						"acc_norm,none": 0.37142857142857144,
						"acc_norm_stderr,none": 0.04738035414793429,
						"acc_stderr,none": 0.04738035414793429,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2914285714285714,
						"acc_norm,none": 0.2914285714285714,
						"acc_norm_stderr,none": 0.034449526562290195,
						"acc_stderr,none": 0.034449526562290195,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293145,
						"acc_stderr,none": 0.030641194076293145,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2393617021276596,
						"acc_norm,none": 0.2393617021276596,
						"acc_norm_stderr,none": 0.02203437784809352,
						"acc_stderr,none": 0.02203437784809352,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.3706896551724138,
						"acc_norm,none": 0.3706896551724138,
						"acc_norm_stderr,none": 0.03177837449226177,
						"acc_stderr,none": 0.03177837449226177,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.3620689655172414,
						"acc_norm,none": 0.3620689655172414,
						"acc_norm_stderr,none": 0.036539236154659684,
						"acc_stderr,none": 0.036539236154659684,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552003,
						"acc_stderr,none": 0.03853254836552003,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.3274336283185841,
						"acc_norm,none": 0.3274336283185841,
						"acc_norm_stderr,none": 0.031285129400738305,
						"acc_stderr,none": 0.031285129400738305,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.3151515151515151,
						"acc_norm,none": 0.3151515151515151,
						"acc_norm_stderr,none": 0.0362773057502241,
						"acc_stderr,none": 0.0362773057502241,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.3136094674556213,
						"acc_norm,none": 0.3136094674556213,
						"acc_norm_stderr,none": 0.035795265164562245,
						"acc_stderr,none": 0.035795265164562245,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.3105590062111801,
						"acc_norm,none": 0.3105590062111801,
						"acc_norm_stderr,none": 0.036581425432887386,
						"acc_stderr,none": 0.036581425432887386,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.407656902206321,
						"likelihood_diff_stderr,none": 0.5565249799052954,
						"pct_stereotype,none": 0.6234347048300537,
						"pct_stereotype_stderr,none": 0.06837641099061875
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6285778175313057,
						"likelihood_diff_stderr,none": 0.0881514378627806,
						"pct_stereotype,none": 0.6386404293381037,
						"pct_stereotype_stderr,none": 0.011734402417305048
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.127747252747253,
						"likelihood_diff_stderr,none": 0.409223715379692,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 7.136363636363637,
						"likelihood_diff_stderr,none": 1.8253622418713462,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.061538461538461,
						"likelihood_diff_stderr,none": 0.6351349126355129,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.05266563052934291
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.43828125,
						"likelihood_diff_stderr,none": 0.16244290215486995,
						"pct_stereotype,none": 0.60625,
						"pct_stereotype_stderr,none": 0.02735525815821925
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.587962962962963,
						"likelihood_diff_stderr,none": 0.24315638161386122,
						"pct_stereotype,none": 0.5833333333333334,
						"pct_stereotype_stderr,none": 0.03362277436608043
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.7743055555555554,
						"likelihood_diff_stderr,none": 0.3411785747816224,
						"pct_stereotype,none": 0.7361111111111112,
						"pct_stereotype_stderr,none": 0.05230618728513982
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3257874015748032,
						"likelihood_diff_stderr,none": 0.15019372833856645,
						"pct_stereotype,none": 0.5374015748031497,
						"pct_stereotype_stderr,none": 0.02214356608896984
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.838963963963964,
						"likelihood_diff_stderr,none": 0.3438963648169378,
						"pct_stereotype,none": 0.7747747747747747,
						"pct_stereotype_stderr,none": 0.03982904640716733
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.826612903225806,
						"likelihood_diff_stderr,none": 0.4054835401879022,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.440131578947368,
						"likelihood_diff_stderr,none": 0.2605603536675989,
						"pct_stereotype,none": 0.6789473684210526,
						"pct_stereotype_stderr,none": 0.03396059335824887
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.186680083482409,
						"likelihood_diff_stderr,none": 0.07300461473327673,
						"pct_stereotype,none": 0.6082289803220036,
						"pct_stereotype_stderr,none": 0.01192374558275331
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.188888888888889,
						"likelihood_diff_stderr,none": 0.28314355593854545,
						"pct_stereotype,none": 0.6555555555555556,
						"pct_stereotype_stderr,none": 0.050369697187736755
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.269230769230769,
						"likelihood_diff_stderr,none": 0.4832627643511178,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.015151515151516,
						"likelihood_diff_stderr,none": 0.4690875059356265,
						"pct_stereotype,none": 0.7424242424242424,
						"pct_stereotype_stderr,none": 0.054240275510565296
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.721183800623053,
						"likelihood_diff_stderr,none": 0.1365819037593861,
						"pct_stereotype,none": 0.6074766355140186,
						"pct_stereotype_stderr,none": 0.02729748001296247
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.544466403162055,
						"likelihood_diff_stderr,none": 0.19278258656250766,
						"pct_stereotype,none": 0.44664031620553357,
						"pct_stereotype_stderr,none": 0.03131716554414947
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4322916666666665,
						"likelihood_diff_stderr,none": 0.4124933126206636,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.055335047518872166
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8407608695652176,
						"likelihood_diff_stderr,none": 0.13534430295861505,
						"pct_stereotype,none": 0.5173913043478261,
						"pct_stereotype_stderr,none": 0.023323879622363822
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.0293478260869566,
						"likelihood_diff_stderr,none": 0.24081293892785152,
						"pct_stereotype,none": 0.782608695652174,
						"pct_stereotype_stderr,none": 0.038631448549506
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.385989010989011,
						"likelihood_diff_stderr,none": 0.32382647356314653,
						"pct_stereotype,none": 0.8351648351648352,
						"pct_stereotype_stderr,none": 0.039110176747367435
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.663265306122449,
						"likelihood_diff_stderr,none": 0.24268439558593918,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.031008683647302113
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"glue": {
						"acc,none": 0.5761523344449738,
						"acc_stderr,none": 0.014028021819876564,
						"alias": "glue",
						"f1,none": 0.6882502219987833,
						"f1_stderr,none": 0.00021238381450874074,
						"mcc,none": 0.00286100001416597,
						"mcc_stderr,none": 0.030802167125592427
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.0887035633055345,
						"exact_match_stderr,get-answer": 0.007831458737058714
					},
					"hellaswag": {
						"acc,none": 0.5258912567217686,
						"acc_norm,none": 0.7085241983668592,
						"acc_norm_stderr,none": 0.004535133886462041,
						"acc_stderr,none": 0.004983087049281744,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.12705746462604678,
						"acc_norm,none": 0.12705746462604678,
						"acc_norm_stderr,none": 0.05366844030122934,
						"acc_stderr,none": 0.05366844030122934,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.038612291966536955,
						"acc_stderr,none": 0.038612291966536955,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.112,
						"acc_norm,none": 0.112,
						"acc_norm_stderr,none": 0.009977753031397254,
						"acc_stderr,none": 0.009977753031397254,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.092,
						"acc_norm,none": 0.092,
						"acc_norm_stderr,none": 0.009144376393151082,
						"acc_stderr,none": 0.009144376393151082,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319327,
						"acc_stderr,none": 0.012535235623319327,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499354,
						"acc_stderr,none": 0.012864077288499354,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.015865408450741195,
						"acc_stderr,none": 0.015865408450741195,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.00739531545579294,
						"acc_stderr,none": 0.00739531545579294,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.00985982840703719,
						"acc_stderr,none": 0.00985982840703719,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.05,
						"acc_norm,none": 0.05,
						"acc_norm_stderr,none": 0.0068954729748979034,
						"acc_stderr,none": 0.0068954729748979034,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.028085923439997284,
						"acc_stderr,none": 0.028085923439997284,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.074,
						"acc_norm,none": 0.074,
						"acc_norm_stderr,none": 0.008282064512704168,
						"acc_stderr,none": 0.008282064512704168,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.24615384615384617,
						"acc_norm,none": 0.24615384615384617,
						"acc_norm_stderr,none": 0.03792711596479613,
						"acc_stderr,none": 0.03792711596479613,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.048,
						"acc_norm,none": 0.048,
						"acc_norm_stderr,none": 0.006763264133666679,
						"acc_stderr,none": 0.006763264133666679,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.065,
						"acc_norm,none": 0.065,
						"acc_norm_stderr,none": 0.00779973306183202,
						"acc_stderr,none": 0.00779973306183202,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707385,
						"acc_stderr,none": 0.012559527926707385,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.007395315455792946,
						"acc_stderr,none": 0.007395315455792946,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.011297239823409308,
						"acc_stderr,none": 0.011297239823409308,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.126,
						"acc_norm,none": 0.126,
						"acc_norm_stderr,none": 0.01049924922240805,
						"acc_stderr,none": 0.01049924922240805,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.00949157995752504,
						"acc_stderr,none": 0.00949157995752504,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847165,
						"acc_stderr,none": 0.009779910359847165,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.04408440022768077,
						"acc_stderr,none": 0.04408440022768077,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.064,
						"acc_norm,none": 0.064,
						"acc_norm_stderr,none": 0.0077436402269193015,
						"acc_stderr,none": 0.0077436402269193015,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.009820001651345701,
						"acc_stderr,none": 0.009820001651345701,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753651,
						"acc_stderr,none": 0.008583336977753651,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.231,
						"acc_norm,none": 0.231,
						"acc_norm_stderr,none": 0.01333479721693643,
						"acc_stderr,none": 0.01333479721693643,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024949,
						"acc_stderr,none": 0.009698921026024949,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264366,
						"acc_stderr,none": 0.012510816141264366,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.15333333333333332,
						"acc_norm,none": 0.15333333333333332,
						"acc_norm_stderr,none": 0.014721806604031804,
						"acc_stderr,none": 0.014721806604031804,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.139,
						"acc_norm,none": 0.139,
						"acc_norm_stderr,none": 0.010945263761042958,
						"acc_stderr,none": 0.010945263761042958,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.121,
						"acc_norm,none": 0.121,
						"acc_norm_stderr,none": 0.01031821038094609,
						"acc_stderr,none": 0.01031821038094609,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.091,
						"acc_norm,none": 0.091,
						"acc_norm_stderr,none": 0.009099549538400248,
						"acc_stderr,none": 0.009099549538400248,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.106,
						"acc_norm,none": 0.106,
						"acc_norm_stderr,none": 0.009739551265785129,
						"acc_stderr,none": 0.009739551265785129,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.024337372337779075,
						"acc_stderr,none": 0.024337372337779075,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.251,
						"acc_norm,none": 0.251,
						"acc_norm_stderr,none": 0.013718133516888917,
						"acc_stderr,none": 0.013718133516888917,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.076,
						"acc_norm,none": 0.076,
						"acc_norm_stderr,none": 0.008384169266796375,
						"acc_stderr,none": 0.008384169266796375,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333327,
						"acc_stderr,none": 0.010878848714333327,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.028085923439997273,
						"acc_stderr,none": 0.028085923439997273,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.147,
						"acc_norm,none": 0.147,
						"acc_norm_stderr,none": 0.011203415395160336,
						"acc_stderr,none": 0.011203415395160336,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074125,
						"acc_stderr,none": 0.012336254828074125,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.205,
						"acc_norm,none": 0.205,
						"acc_norm_stderr,none": 0.02861764926136018,
						"acc_stderr,none": 0.02861764926136018,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.099,
						"acc_norm,none": 0.099,
						"acc_norm_stderr,none": 0.009449248027662746,
						"acc_stderr,none": 0.009449248027662746,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.5362859022144266,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.0004917354709418831,
						"acc_stderr,none": 0.0446103990286613,
						"alias": "kobest",
						"f1,none": 0.45296706369731293,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5477207977207977,
						"acc_stderr,none": 0.013287830794785743,
						"alias": " - kobest_boolq",
						"f1,none": 0.4478968694091616,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.015275252316519359,
						"alias": " - kobest_copa",
						"f1,none": 0.6290726817042607,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.444,
						"acc_norm,none": 0.568,
						"acc_norm_stderr,none": 0.022175109265613172,
						"acc_stderr,none": 0.022242244375731024,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.43871578359013674,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5340050377833753,
						"acc_stderr,none": 0.0250677696306619,
						"alias": " - kobest_sentineg",
						"f1,none": 0.40651943791261624,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4865079365079365,
						"acc_stderr,none": 0.014086365971849188,
						"alias": " - kobest_wic",
						"f1,none": 0.3391403035891616,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.7124005433727926,
						"acc_stderr,none": 0.01612482891631054,
						"alias": "lambada",
						"perplexity,none": 3.8058609547979567,
						"perplexity_stderr,none": 0.2287782595118224
					},
					"lambada_cloze": {
						"acc,none": 0.07413157384048127,
						"acc_stderr,none": 0.004277624730313147,
						"alias": "lambada_cloze",
						"perplexity,none": 375.2091205821181,
						"perplexity_stderr,none": 119.65724494959274
					},
					"lambada_multilingual": {
						"acc,none": 0.5369687560644284,
						"acc_stderr,none": 0.08450595721892018,
						"alias": "lambada_multilingual",
						"perplexity,none": 21.001255003421633,
						"perplexity_stderr,none": 8.211457257577647
					},
					"lambada_openai": {
						"acc,none": 0.74345041723268,
						"acc_stderr,none": 0.006084483727167678,
						"alias": " - lambada_openai",
						"perplexity,none": 3.3763932503864287,
						"perplexity_stderr,none": 0.06622884189802866
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.06966815447312245,
						"acc_stderr,none": 0.0035468936721517507,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 137.75162638548608,
						"perplexity_stderr,none": 3.8432669279035307
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.42732388899670093,
						"acc_stderr,none": 0.006891998788447821,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 34.40978079133955,
						"perplexity_stderr,none": 1.918344506489714
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7418979235396856,
						"acc_stderr,none": 0.006096490478492318,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.3762243984574694,
						"perplexity_stderr,none": 0.06624164898650113
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.44886473898699786,
						"acc_stderr,none": 0.0069294524147908345,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 29.05424891266719,
						"perplexity_stderr,none": 1.437037902876552
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5480302736270134,
						"acc_stderr,none": 0.006933763441941941,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 16.503461598315894,
						"perplexity_stderr,none": 0.8043491865041825
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.5187269551717446,
						"acc_stderr,none": 0.0069610900217951036,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 21.662559316328046,
						"perplexity_stderr,none": 1.1523954241432737
					},
					"lambada_standard": {
						"acc,none": 0.6827091014942752,
						"acc_stderr,none": 0.006484234706911054,
						"alias": " - lambada_standard",
						"perplexity,none": 4.23519760293699,
						"perplexity_stderr,none": 0.09010066981092772
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.0785949932078401,
						"acc_stderr,none": 0.003749167098251127,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 612.66661477875,
						"perplexity_stderr,none": 20.620812286359456
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.30279898218829515,
						"exact_match_stderr,get-answer": 0.011592260158888737
					},
					"logiqa": {
						"acc,none": 0.2457757296466974,
						"acc_norm,none": 0.28417818740399386,
						"acc_norm_stderr,none": 0.01769054268019079,
						"acc_stderr,none": 0.016887410894296927,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24681933842239187,
						"acc_norm,none": 0.2729007633587786,
						"acc_norm_stderr,none": 0.011238571866209681,
						"acc_stderr,none": 0.010878050728561937,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.26566164154103855,
						"acc_norm,none": 0.27839195979899495,
						"acc_norm_stderr,none": 0.008205019480641219,
						"acc_stderr,none": 0.008085616216226036,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.5256301631010379,
						"acc_stderr,none": 0.005139130829797585,
						"alias": "mc_taco",
						"f1,none": 0.5305523530028299,
						"f1_stderr,none": 0.006210958312731179
					},
					"medmcqa": {
						"acc,none": 0.32942863973224956,
						"acc_norm,none": 0.32942863973224956,
						"acc_norm_stderr,none": 0.007267937144986338,
						"acc_stderr,none": 0.007267937144986338,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.3110761979575805,
						"acc_norm,none": 0.3110761979575805,
						"acc_norm_stderr,none": 0.012980022053195433,
						"acc_stderr,none": 0.012980022053195433,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.33086454920951425,
						"acc_stderr,none": 0.060605075693583886,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34814814814814815,
						"acc_stderr,none": 0.041153246103369526,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.26973684210526316,
						"acc_stderr,none": 0.03611780560284898,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.37358490566037733,
						"acc_stderr,none": 0.02977308271331987,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.3263888888888889,
						"acc_stderr,none": 0.03921067198982266,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621504,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036625,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.27167630057803466,
						"acc_stderr,none": 0.03391750322321659,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542126,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.3659574468085106,
						"acc_stderr,none": 0.0314895582974553,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281337,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2827586206896552,
						"acc_stderr,none": 0.03752833958003337,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.022644212615525218,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.31746031746031744,
						"acc_stderr,none": 0.04163453031302859,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3967741935483871,
						"acc_stderr,none": 0.027831231605767937,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.29064039408866993,
						"acc_stderr,none": 0.03194740072265541,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.46060606060606063,
						"acc_stderr,none": 0.03892207016552012,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3686868686868687,
						"acc_stderr,none": 0.034373055019806184,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.45077720207253885,
						"acc_stderr,none": 0.03590910952235523,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.023400928918310495,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24814814814814815,
						"acc_stderr,none": 0.0263357394040558,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.31512605042016806,
						"acc_stderr,none": 0.030176808288974337,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.034791855725996586,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3761467889908257,
						"acc_stderr,none": 0.02076923196820508,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16203703703703703,
						"acc_stderr,none": 0.025130453652268455,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.03492406104163613,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.42616033755274263,
						"acc_stderr,none": 0.03219035703131774,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3721973094170404,
						"acc_stderr,none": 0.03244305283008731,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3511450381679389,
						"acc_stderr,none": 0.04186445163013751,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.32561105207226343,
						"acc_stderr,none": 0.0607809456258765,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.36363636363636365,
						"acc_stderr,none": 0.043913262867240704,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.32407407407407407,
						"acc_stderr,none": 0.04524596007030049,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3312883435582822,
						"acc_stderr,none": 0.03697983910025588,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.042466243366976256,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.4174757281553398,
						"acc_stderr,none": 0.048828405482122375,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.41025641025641024,
						"acc_stderr,none": 0.03222414045241107,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252604,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4367816091954023,
						"acc_stderr,none": 0.017736470837800687,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.315028901734104,
						"acc_stderr,none": 0.025009313790069706,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2346368715083799,
						"acc_stderr,none": 0.014173044098303679,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.3202614379084967,
						"acc_stderr,none": 0.02671611838015685,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3662697135500483,
						"acc_stderr,none": 0.049834862065929936,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3987138263665595,
						"acc_stderr,none": 0.0278093225857745,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.36419753086419754,
						"acc_stderr,none": 0.026774929899722327,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2624113475177305,
						"acc_stderr,none": 0.026244920349843007,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.3011734028683181,
						"acc_stderr,none": 0.011717148751648424,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.029029422815681397,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.3284313725490196,
						"acc_stderr,none": 0.018999707383162666,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.39090909090909093,
						"acc_stderr,none": 0.04673752333670239,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.3224489795918367,
						"acc_stderr,none": 0.029923100563683906,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.04972757753075276,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.43283582089552236,
						"acc_stderr,none": 0.03503490923673282,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28227085315572464,
						"acc_stderr,none": 0.060616974885657864,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.43,
						"acc_stderr,none": 0.049756985195624284,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.037117251907407486,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.391812865497076,
						"acc_stderr,none": 0.037439798259264,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.38145695364238413,
						"acc_stderr,none": 0.004903257549790859,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36859235150528885,
						"acc_stderr,none": 0.004865519648319398,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.021463642763705344,
						"alias": "mrpc",
						"f1,none": 0.8416149068322981,
						"f1_stderr,none": 0.015473573586582462
					},
					"multimedqa": {
						"acc,none": 0.35358410220014197,
						"acc_norm,none": 0.32068854393146246,
						"acc_norm_stderr,none": 0.00010226000674634883,
						"acc_stderr,none": 0.09857919062749855,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.49257425742574257,
						"acc_stderr,none": 0.007181011029317661,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7004326577582155,
						"mrr_stderr,none": 0.010311480723006769,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4221218961625282,
						"r@2_stderr,none": 0.01660219170551757
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6583897683676962,
						"mrr_stderr,none": 0.010460590295448532,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.463882618510158,
						"r@2_stderr,none": 0.016763409667403396
					},
					"nq_open": {
						"alias": "nq_open",
						"exact_match,remove_whitespace": 0.012742382271468145,
						"exact_match_stderr,remove_whitespace": 0.0018670120483762712
					},
					"openbookqa": {
						"acc,none": 0.298,
						"acc_norm,none": 0.408,
						"acc_norm_stderr,none": 0.022000910893877186,
						"acc_stderr,none": 0.02047511809298897,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.010957190790298965,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3785,
						"acc_stderr,none": 0.010847935926107404,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.371,
						"acc_stderr,none": 0.010804530019138515,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5385,
						"acc_stderr,none": 0.011149934327957058,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.5175,
						"acc_stderr,none": 0.01117628425125418,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4845,
						"acc_stderr,none": 0.01117776123260332,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.011183046555618487,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4554285714285714,
						"acc_stderr,none": 0.053940079204857536,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7704026115342764,
						"acc_norm,none": 0.7731229597388466,
						"acc_norm_stderr,none": 0.009771584259215167,
						"acc_stderr,none": 0.009812682950815194,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24338172502134928,
						"acc_norm,none": 0.28031596925704527,
						"acc_norm_stderr,none": 0.0032814667207950675,
						"acc_stderr,none": 0.0031351299519620946,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01987435483128749,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7433912692426552,
						"acc_norm,none": 0.6280221175210029,
						"acc_norm_stderr,none": 0.010330170441803736,
						"acc_stderr,none": 0.14350617224236548,
						"alias": "pythia",
						"bits_per_byte,none": 0.6325467753027727,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502993074551348,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.3763932503864287,
						"perplexity_stderr,none": 0.06622884189802866,
						"word_perplexity,none": 10.428882412474298,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.06166073934278518,
						"acc_stderr,none": 0.04155271878590972,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.55,
						"acc_norm_stderr,none": 0.04560517440787951,
						"acc_stderr,none": 0.04552192400253557,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.36875,
						"acc_norm,none": 0.425,
						"acc_norm_stderr,none": 0.0392039498715957,
						"acc_stderr,none": 0.03826204233503227,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.38028169014084506,
						"acc_norm,none": 0.38380281690140844,
						"acc_norm_stderr,none": 0.028908177688046176,
						"acc_stderr,none": 0.028857363751758302,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4966135822807981,
						"acc_stderr,none": 0.006765255380909215,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6763541924313629,
						"acc_stderr,none": 0.0023268885702922635,
						"alias": "qqp",
						"f1,none": 0.6869092910295983,
						"f1_stderr,none": 0.002600150100598662
					},
					"race": {
						"acc,none": 0.3559808612440191,
						"acc_stderr,none": 0.014818780400538108,
						"alias": "race"
					},
					"record": {
						"alias": "record",
						"em,none": 0.2529,
						"em_stderr,none": 0.004346958546485128,
						"f1,none": 0.26218714309036734,
						"f1_stderr,none": 0.004361717654503209
					},
					"rte": {
						"acc,none": 0.5884476534296029,
						"acc_stderr,none": 0.029621832222417196,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.956,
						"acc_norm,none": 0.93,
						"acc_norm_stderr,none": 0.008072494358323494,
						"acc_stderr,none": 0.006488921798427416,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5956678700361011,
						"acc_stderr,none": 0.029540420517619726,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.911697247706422,
						"acc_stderr,none": 0.009613983323850236,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5678796361091673,
						"acc_norm,none": 0.766520043986804,
						"acc_norm_stderr,none": 0.00299100598101513,
						"acc_stderr,none": 0.0035023636464972794,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.647166483644471,
						"acc_stderr,none": 0.07235531218680337,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5381610576923077,
						"acc_stderr,none": 0.00498965908191743,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.8256815648119996,
						"acc_stderr,none": 0.003819507769970271,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5811764705882353,
						"acc_stderr,none": 0.004885294527471592,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3318201004314263,
						"acc_stderr,none": 0.0016185678964646094,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.36107711138310894,
						"bleu_acc_stderr,none": 0.016814312844836882,
						"bleu_diff,none": -4.207307662674644,
						"bleu_diff_stderr,none": 0.9282874485829784,
						"bleu_max,none": 28.968049598440547,
						"bleu_max_stderr,none": 0.8184806122513136,
						"rouge1_acc,none": 0.3219094247246022,
						"rouge1_acc_stderr,none": 0.01635556761196041,
						"rouge1_diff,none": -5.379507228349093,
						"rouge1_diff_stderr,none": 1.0701075356835896,
						"rouge1_max,none": 54.33538941041346,
						"rouge1_max_stderr,none": 0.864656933048363,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.016002651487361,
						"rouge2_diff,none": -6.972999086815549,
						"rouge2_diff_stderr,none": 1.2603592214712704,
						"rouge2_max,none": 38.77260507597626,
						"rouge2_max_stderr,none": 1.050835368985665,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -5.365626722282412,
						"rougeL_diff_stderr,none": 1.0872530514178778,
						"rougeL_max,none": 51.79395244112797,
						"rougeL_max_stderr,none": 0.8898050908068873
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.36107711138310894,
						"bleu_acc_stderr,none": 0.016814312844836882,
						"bleu_diff,none": -4.207307662674644,
						"bleu_diff_stderr,none": 0.9282874485829784,
						"bleu_max,none": 28.968049598440547,
						"bleu_max_stderr,none": 0.8184806122513136,
						"rouge1_acc,none": 0.3219094247246022,
						"rouge1_acc_stderr,none": 0.01635556761196041,
						"rouge1_diff,none": -5.379507228349093,
						"rouge1_diff_stderr,none": 1.0701075356835896,
						"rouge1_max,none": 54.33538941041346,
						"rouge1_max_stderr,none": 0.864656933048363,
						"rouge2_acc,none": 0.2974296205630355,
						"rouge2_acc_stderr,none": 0.016002651487361,
						"rouge2_diff,none": -6.972999086815549,
						"rouge2_diff_stderr,none": 1.2603592214712704,
						"rouge2_max,none": 38.77260507597626,
						"rouge2_max_stderr,none": 1.050835368985665,
						"rougeL_acc,none": 0.3329253365973072,
						"rougeL_acc_stderr,none": 0.016497402382012052,
						"rougeL_diff,none": -5.365626722282412,
						"rougeL_diff_stderr,none": 1.0872530514178778,
						"rougeL_max,none": 51.79395244112797,
						"rougeL_max_stderr,none": 0.8898050908068873
					},
					"truthfulqa_mc1": {
						"acc,none": 0.25703794369645044,
						"acc_stderr,none": 0.015298077509485081,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4066022571664022,
						"acc_stderr,none": 0.014334433217562895,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0,
						"exact_match_stderr,none": 0.0
					},
					"wic": {
						"acc,none": 0.5783699059561128,
						"acc_stderr,none": 0.019565859392130996,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6325288887179478,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5502800869079052,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.428191022549841,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6740331491712708,
						"acc_stderr,none": 0.013173782636922194,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.49038461538461536,
						"acc_stderr,none": 0.04925735314273532,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8168498168498168,
						"acc_stderr,none": 0.023452564261705,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.6223636363636363,
						"acc_stderr,none": 0.07216843330465124,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.6,
						"acc_stderr,none": 0.021930844120728505,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.01987435483128748,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.728,
						"acc_stderr,none": 0.019920483209566065,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.022109039310618552,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.02209471322976178,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.636,
						"acc_stderr,none": 0.02153917063731769,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.020229346329177524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.020354375480530085,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.44112449799196785,
						"acc_stderr,none": 0.0497121896277638,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337343,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.470281124497992,
						"acc_stderr,none": 0.010004353982613843,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.485140562248996,
						"acc_stderr,none": 0.01001764608425538,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.39036144578313253,
						"acc_stderr,none": 0.009778161879954577,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5441767068273092,
						"acc_stderr,none": 0.009982878443738406,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.5012048192771085,
						"acc_stderr,none": 0.010022043771315577,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4883534136546185,
						"acc_stderr,none": 0.010019353650807708,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.43373493975903615,
						"acc_stderr,none": 0.009933667945702074,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4899598393574297,
						"acc_stderr,none": 0.010020052116889139,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.39799196787148594,
						"acc_stderr,none": 0.009811284026425586,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.42168674698795183,
						"acc_stderr,none": 0.00989837949333545,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.46586345381526106,
						"acc_stderr,none": 0.00999868806610265,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.009855567414480243,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750339,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3714859437751004,
						"acc_stderr,none": 0.00968537057188612,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.6328740749654053,
						"acc_stderr,none": 0.05996642434269435,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.01261268831876705,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7782925215089345,
						"acc_stderr,none": 0.010689887294959689,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.7074784910655195,
						"acc_stderr,none": 0.011707038572975033,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5625413633355394,
						"acc_stderr,none": 0.012766070974549612,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.6015883520847121,
						"acc_stderr,none": 0.012598743938252858,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.6644606221045665,
						"acc_stderr,none": 0.01215116443816391,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.5466578424884183,
						"acc_stderr,none": 0.012810980537828155,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.6922567835870285,
						"acc_stderr,none": 0.01187789223516454,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5598941098610192,
						"acc_stderr,none": 0.012774475160716335,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.598941098610192,
						"acc_stderr,none": 0.012612688318767051,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6505625413633356,
						"acc_stderr,none": 0.01226989319022285,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.8035513598561475,
						"acc_stderr,none": 0.035647216912070484,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8606451612903225,
						"acc_stderr,none": 0.007183813190863162,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.7228915662650602,
						"acc_stderr,none": 0.04942589299783094,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.7507820646506778,
						"acc_stderr,none": 0.013975386806002537,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.752851711026616,
						"acc_stderr,none": 0.026649120420793503,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6634920634920635,
						"acc_stderr,none": 0.026665559335926015,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7678571428571429,
						"acc_stderr,none": 0.018824952299180426,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "rwkv-x-dev/v5-Eagle-7B-1_0T-HF"
	},
	"state-spaces/mamba-1.4b-hf": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5346674182638106,
						"acc_norm,none": 0.5169109357384442,
						"acc_norm_stderr,none": 0.0901501082303482,
						"acc_stderr,none": 0.11038575341467648,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3521875,
						"acc_stderr,none": 0.015037209565001783,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.29835,
						"acc_stderr,none": 0.25319922686864127,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8211492537313433,
						"acc_stderr,none": 0.1357261499872233,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.22808320950965824,
						"acc_norm,none": 0.22808320950965824,
						"acc_norm_stderr,none": 0.10631394691234715,
						"acc_stderr,none": 0.10631394691234715,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25237437402866514,
						"acc_norm,none": 0.25237437402866514,
						"acc_norm_stderr,none": 0.03623512061437118,
						"acc_stderr,none": 0.03623512061437118,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6402116401890736,
						"likelihood_diff_stderr,none": 0.471913010531481,
						"pct_stereotype,none": 0.5633571854502087,
						"pct_stereotype_stderr,none": 0.09299930353591648
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04625984251968504,
						"exact_match_stderr,none": 0.004660818035610495
					},
					"glue": {
						"acc,none": 0.4697474988089567,
						"acc_stderr,none": 0.004082436577841039,
						"alias": "glue",
						"f1,none": 0.22582943919220141,
						"f1_stderr,none": 0.0016350714574805375,
						"mcc,none": -0.12239915757301645,
						"mcc_stderr,none": 0.030710327698406543
					},
					"kmmlu": {
						"acc,none": 0.1695928385792665,
						"acc_norm,none": 0.1695928385792665,
						"acc_norm_stderr,none": 0.03964372583432484,
						"acc_stderr,none": 0.03964372583432484,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48629686472264855,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.04143813534441999,
						"alias": "kobest",
						"f1,none": 0.3983926658136865,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6024645837376286,
						"acc_stderr,none": 0.02198681035743871,
						"alias": "lambada",
						"perplexity,none": 6.256157149635965,
						"perplexity_stderr,none": 0.6244302065513897
					},
					"lambada_cloze": {
						"acc,none": 0.052202600426935766,
						"acc_stderr,none": 0.003465720325789709,
						"alias": "lambada_cloze",
						"perplexity,none": 496.02774763563355,
						"perplexity_stderr,none": 87.80027255680362
					},
					"lambada_multilingual": {
						"acc,none": 0.4036095478362119,
						"acc_stderr,none": 0.09299638529776051,
						"alias": "lambada_multilingual",
						"perplexity,none": 73.05043593095773,
						"perplexity_stderr,none": 26.921658499115107
					},
					"mmlu": {
						"acc,none": 0.24675972083748757,
						"acc_stderr,none": 0.040959899913954365,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2588735387885229,
						"acc_stderr,none": 0.033027905215969273,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2349533311876408,
						"acc_stderr,none": 0.04520996219916423,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2333441663958401,
						"acc_stderr,none": 0.03296316439382121,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2534094513162069,
						"acc_stderr,none": 0.050273874905870146,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.26132008516678495,
						"acc_norm,none": 0.23279417315737264,
						"acc_norm_stderr,none": 7.885611105649266e-05,
						"acc_stderr,none": 0.10154046895873245,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.4795,
						"acc_stderr,none": 0.03837052780781795,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7069152363601823,
						"acc_norm,none": 0.5231949841923784,
						"acc_norm_stderr,none": 0.010219545017061498,
						"acc_stderr,none": 0.13640941378521113,
						"alias": "pythia",
						"bits_per_byte,none": 0.7037479522024591,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6287305519242088,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.047479080179978,
						"perplexity_stderr,none": 0.12068885557779292,
						"word_perplexity,none": 13.578549189135115,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.30319148936170215,
						"acc_norm,none": 0.3882978723404255,
						"acc_norm_stderr,none": 0.05027948481310724,
						"acc_stderr,none": 0.0422577598480595,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5520614954577219,
						"acc_stderr,none": 0.03344713688963425,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.28209200611068425,
						"acc_stderr,none": 0.0014320003893629608,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.015764770836777308,
						"bleu_diff,none": -7.540965232122667,
						"bleu_diff_stderr,none": 0.708780399154416,
						"bleu_max,none": 22.21115890836555,
						"bleu_max_stderr,none": 0.6911388759354934,
						"rouge1_acc,none": 0.2594859241126071,
						"rouge1_acc_stderr,none": 0.015345409485557994,
						"rouge1_diff,none": -10.223385023753384,
						"rouge1_diff_stderr,none": 0.7949966578940945,
						"rouge1_max,none": 47.10118109447428,
						"rouge1_max_stderr,none": 0.8454421627090442,
						"rouge2_acc,none": 0.18237454100367198,
						"rouge2_acc_stderr,none": 0.013518055636187212,
						"rouge2_diff,none": -12.44433338431279,
						"rouge2_diff_stderr,none": 0.9074252477600858,
						"rouge2_max,none": 29.80648575141351,
						"rouge2_max_stderr,none": 0.9368742186398287,
						"rougeL_acc,none": 0.23378212974296206,
						"rougeL_acc_stderr,none": 0.014816195991931591,
						"rougeL_diff,none": -10.398652019578794,
						"rougeL_diff_stderr,none": 0.7925931835498271,
						"rougeL_max,none": 44.07569039028453,
						"rougeL_max_stderr,none": 0.8527266622513323
					},
					"xcopa": {
						"acc,none": 0.5441818181818182,
						"acc_stderr,none": 0.031389855237621694,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.41563587684069614,
						"acc_stderr,none": 0.044093749051022736,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.542205643463089,
						"acc_stderr,none": 0.05184065235877343,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7244324567318499,
						"acc_stderr,none": 0.04981857414933562,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5346674182638106,
						"acc_norm,none": 0.5169109357384442,
						"acc_norm_stderr,none": 0.0901501082303482,
						"acc_stderr,none": 0.11038575341467648,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3521875,
						"acc_stderr,none": 0.015037209565001783,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.345,
						"acc_stderr,none": 0.015039986742055237,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407555,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35333333333333333,
						"acc_stderr,none": 0.013804572162314935,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.30119453924914674,
						"acc_norm,none": 0.3267918088737201,
						"acc_norm_stderr,none": 0.013706665975587333,
						"acc_stderr,none": 0.013406741767847626,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6498316498316499,
						"acc_norm,none": 0.6106902356902357,
						"acc_norm_stderr,none": 0.010005212782878142,
						"acc_stderr,none": 0.009788295410093153,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.29835,
						"acc_stderr,none": 0.25319922686864127,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.128,
						"acc_stderr,none": 0.0074723435386080395,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.9125,
						"acc_stderr,none": 0.006319956164639143,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.3175,
						"acc_stderr,none": 0.010411583719001251,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.6605,
						"acc_stderr,none": 0.01059131545094804,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.4305,
						"acc_stderr,none": 0.011074574398099859,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.415,
						"acc_stderr,none": 0.011020354990292204,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0435,
						"acc_stderr,none": 0.004562267215000635,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.066,
						"acc_stderr,none": 0.005553144938623078,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.000999249343069501,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.008,
						"acc_stderr,none": 0.0019924821184884645,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006073752711496746,
						"acc_stderr,none": 0.0016186926522842688,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8211492537313433,
						"acc_stderr,none": 0.1357261499872233,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.924,
						"acc_stderr,none": 0.008384169266796375,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.002818500300504505,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578221,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.793,
						"acc_stderr,none": 0.012818553557843986,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904614,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.783,
						"acc_stderr,none": 0.01304151375727071,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.015050266127564445,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.643,
						"acc_stderr,none": 0.015158521721486769,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.842,
						"acc_stderr,none": 0.01153989467755957,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448786,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611489,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.006558812241406117,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380709,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.0076298239962803065,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.01013146813875698,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151113,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.005034813735318214,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.010878848714333316,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.014553205687950427,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.813,
						"acc_stderr,none": 0.012336254828074118,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.825,
						"acc_stderr,none": 0.012021627157731989,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936706,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.846,
						"acc_stderr,none": 0.011419913065098687,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.964,
						"acc_stderr,none": 0.005893957816165562,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.472,
						"acc_stderr,none": 0.015794475789511476,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381793,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968762,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809961,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423526,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697071,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369681,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613604,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968762,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.661,
						"acc_stderr,none": 0.014976758771620342,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.619,
						"acc_stderr,none": 0.015364734787007436,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.665,
						"acc_stderr,none": 0.014933117490932577,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938565,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397245,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722692,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380701,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426683,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.012361586015103754,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.688,
						"acc_stderr,none": 0.01465847437050901,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.429,
						"acc_stderr,none": 0.015658997547870247,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295445,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.902,
						"acc_stderr,none": 0.00940661918462125,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.003148000938676761,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.74,
						"acc_stderr,none": 0.013877773329774166,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.461,
						"acc_stderr,none": 0.015771104201283186,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598123,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248085,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.01459328489285263,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.01031821038094609,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920838,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370145,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.0075720760915574245,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855755,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.006125072776426107,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.951,
						"acc_stderr,none": 0.006829761756140924,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.515,
						"acc_stderr,none": 0.015812179641814892,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.386,
						"acc_stderr,none": 0.01540263747678437,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6330275229357798,
						"acc_stderr,none": 0.008429864284269442,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.17857142857142858,
						"acc_stderr,none": 0.05164277182008721,
						"alias": "cb",
						"f1,none": 0.15555555555555553,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.22808320950965824,
						"acc_norm,none": 0.22808320950965824,
						"acc_norm_stderr,none": 0.10631394691234715,
						"acc_stderr,none": 0.10631394691234715,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2553191489361702,
						"acc_norm,none": 0.2553191489361702,
						"acc_norm_stderr,none": 0.06429065810876616,
						"acc_stderr,none": 0.06429065810876616,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129649,
						"acc_stderr,none": 0.06861056852129649,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.25806451612903225,
						"acc_norm,none": 0.25806451612903225,
						"acc_norm_stderr,none": 0.07988892740217939,
						"acc_stderr,none": 0.07988892740217939,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956522,
						"acc_stderr,none": 0.04347826086956522,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.09829463743659808,
						"acc_stderr,none": 0.09829463743659808,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.05763033956734372,
						"acc_stderr,none": 0.05763033956734372,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359516,
						"acc_stderr,none": 0.07824607964359516,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.11236664374387367,
						"acc_stderr,none": 0.11236664374387367,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996394,
						"acc_stderr,none": 0.08081046758996394,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.085947008518708,
						"acc_stderr,none": 0.085947008518708,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628253,
						"acc_stderr,none": 0.05817221556628253,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453994,
						"acc_stderr,none": 0.06957698714453994,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.06358669845936324,
						"acc_stderr,none": 0.06358669845936324,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764435,
						"acc_stderr,none": 0.09361833424764435,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25237437402866514,
						"acc_norm,none": 0.25237437402866514,
						"acc_norm_stderr,none": 0.03623512061437118,
						"acc_stderr,none": 0.03623512061437118,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24401913875598086,
						"acc_norm,none": 0.24401913875598086,
						"acc_norm_stderr,none": 0.029780753228706106,
						"acc_stderr,none": 0.029780753228706106,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865143,
						"acc_stderr,none": 0.03462157845865143,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2476780185758514,
						"acc_norm,none": 0.2476780185758514,
						"acc_norm_stderr,none": 0.024055681892974835,
						"acc_stderr,none": 0.024055681892974835,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24581005586592178,
						"acc_norm,none": 0.24581005586592178,
						"acc_norm_stderr,none": 0.032272320235413,
						"acc_stderr,none": 0.032272320235413,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460295,
						"acc_stderr,none": 0.028458820991460295,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.29245283018867924,
						"acc_norm,none": 0.29245283018867924,
						"acc_norm_stderr,none": 0.04439263906199629,
						"acc_stderr,none": 0.04439263906199629,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3364485981308411,
						"acc_norm,none": 0.3364485981308411,
						"acc_norm_stderr,none": 0.045892711114716274,
						"acc_stderr,none": 0.045892711114716274,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3113207547169811,
						"acc_norm,none": 0.3113207547169811,
						"acc_norm_stderr,none": 0.0451874553177075,
						"acc_stderr,none": 0.0451874553177075,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2037037037037037,
						"acc_norm,none": 0.2037037037037037,
						"acc_norm_stderr,none": 0.03893542518824847,
						"acc_stderr,none": 0.03893542518824847,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.20952380952380953,
						"acc_norm,none": 0.20952380952380953,
						"acc_norm_stderr,none": 0.039906571509931855,
						"acc_stderr,none": 0.039906571509931855,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371223,
						"acc_stderr,none": 0.04198857662371223,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.23443223443223443,
						"acc_norm,none": 0.23443223443223443,
						"acc_norm_stderr,none": 0.025687156459084187,
						"acc_stderr,none": 0.025687156459084187,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604246,
						"acc_stderr,none": 0.030587591351604246,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.25146198830409355,
						"acc_norm,none": 0.25146198830409355,
						"acc_norm_stderr,none": 0.033275044238468436,
						"acc_stderr,none": 0.033275044238468436,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.24539877300613497,
						"acc_norm,none": 0.24539877300613497,
						"acc_norm_stderr,none": 0.03380939813943354,
						"acc_stderr,none": 0.03380939813943354,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.02839429305079051,
						"acc_stderr,none": 0.02839429305079051,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.03074630074212451,
						"acc_stderr,none": 0.03074630074212451,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.27310924369747897,
						"acc_norm,none": 0.27310924369747897,
						"acc_norm_stderr,none": 0.028942004040998164,
						"acc_stderr,none": 0.028942004040998164,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032732683535398856,
						"acc_stderr,none": 0.032732683535398856,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.0355134404169743,
						"acc_stderr,none": 0.0355134404169743,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665338,
						"acc_stderr,none": 0.03334645408665338,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.25874125874125875,
						"acc_norm,none": 0.25874125874125875,
						"acc_norm_stderr,none": 0.03675137438900237,
						"acc_stderr,none": 0.03675137438900237,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.029661370413965837,
						"acc_stderr,none": 0.029661370413965837,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.03161296043612008,
						"acc_stderr,none": 0.03161296043612008,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.02973775172659683,
						"acc_stderr,none": 0.02973775172659683,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.23706896551724138,
						"acc_norm,none": 0.23706896551724138,
						"acc_norm_stderr,none": 0.027981694008624977,
						"acc_stderr,none": 0.027981694008624977,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676974,
						"acc_stderr,none": 0.03410167836676974,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.12239915757301645,
						"mcc_stderr,none": 0.030710327698406543
					},
					"copa": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.04020151261036843,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6402116401890736,
						"likelihood_diff_stderr,none": 0.471913010531481,
						"pct_stereotype,none": 0.5633571854502087,
						"pct_stereotype_stderr,none": 0.09299930353591648
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.552029138245466,
						"likelihood_diff_stderr,none": 0.08612785975104485,
						"pct_stereotype,none": 0.6386404293381037,
						"pct_stereotype_stderr,none": 0.011734402417305051
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.7302325322077823,
						"likelihood_diff_stderr,none": 0.38634546281659077,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497394
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.004888014359907,
						"likelihood_diff_stderr,none": 1.7481589482761957,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.15212000482437738
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.345877544696514,
						"likelihood_diff_stderr,none": 0.6516949583158926,
						"pct_stereotype,none": 0.7076923076923077,
						"pct_stereotype_stderr,none": 0.056852867304209534
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.5501682698726653,
						"likelihood_diff_stderr,none": 0.16658570512640458,
						"pct_stereotype,none": 0.665625,
						"pct_stereotype_stderr,none": 0.026414133635631585
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3570377650084318,
						"likelihood_diff_stderr,none": 0.2151486492590799,
						"pct_stereotype,none": 0.5324074074074074,
						"pct_stereotype_stderr,none": 0.03402801581358966
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.725586599773831,
						"likelihood_diff_stderr,none": 0.3668135223688071,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4174723324813243,
						"likelihood_diff_stderr,none": 0.14507702799698768,
						"pct_stereotype,none": 0.5551181102362205,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.6592008401681713,
						"likelihood_diff_stderr,none": 0.36717624599385734,
						"pct_stereotype,none": 0.7837837837837838,
						"pct_stereotype_stderr,none": 0.039250566187156465
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.3280398461126515,
						"likelihood_diff_stderr,none": 0.41196248455918566,
						"pct_stereotype,none": 0.8602150537634409,
						"pct_stereotype_stderr,none": 0.036152622588464155
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.197011285079153,
						"likelihood_diff_stderr,none": 0.25294949147047086,
						"pct_stereotype,none": 0.631578947368421,
						"pct_stereotype_stderr,none": 0.03508771929824559
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.7282332134303693,
						"likelihood_diff_stderr,none": 0.09212793785337944,
						"pct_stereotype,none": 0.4895646988670245,
						"pct_stereotype_stderr,none": 0.012210638982043403
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.3205751207139755,
						"likelihood_diff_stderr,none": 0.3385155989578591,
						"pct_stereotype,none": 0.4777777777777778,
						"pct_stereotype_stderr,none": 0.05294752255076824
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.7968406677246094,
						"likelihood_diff_stderr,none": 0.5051154103855336,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.13323467750529824
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.852602351795543,
						"likelihood_diff_stderr,none": 0.43696897624067793,
						"pct_stereotype,none": 0.6212121212121212,
						"pct_stereotype_stderr,none": 0.0601674102524024
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.447452123291396,
						"likelihood_diff_stderr,none": 0.19862187951100613,
						"pct_stereotype,none": 0.5295950155763239,
						"pct_stereotype_stderr,none": 0.027901844420051183
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.438853757654726,
						"likelihood_diff_stderr,none": 0.2694336535027759,
						"pct_stereotype,none": 0.34782608695652173,
						"pct_stereotype_stderr,none": 0.030002850406189337
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.411216762330797,
						"likelihood_diff_stderr,none": 0.41637119834256525,
						"pct_stereotype,none": 0.5972222222222222,
						"pct_stereotype_stderr,none": 0.05820650942569533
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.486552155536154,
						"likelihood_diff_stderr,none": 0.18268279322290257,
						"pct_stereotype,none": 0.3673913043478261,
						"pct_stereotype_stderr,none": 0.02250223585295917
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.45685277192489,
						"likelihood_diff_stderr,none": 0.3389667871441239,
						"pct_stereotype,none": 0.6521739130434783,
						"pct_stereotype_stderr,none": 0.044607754438485
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.675998100867638,
						"likelihood_diff_stderr,none": 0.3254621179338415,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.051282051282051246
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.001680364414137,
						"likelihood_diff_stderr,none": 0.26898865483160755,
						"pct_stereotype,none": 0.6326530612244898,
						"pct_stereotype_stderr,none": 0.03452261728704164
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04625984251968504,
						"exact_match_stderr,none": 0.004660818035610495
					},
					"glue": {
						"acc,none": 0.4697474988089567,
						"acc_stderr,none": 0.004082436577841039,
						"alias": "glue",
						"f1,none": 0.22582943919220141,
						"f1_stderr,none": 0.0016350714574805375,
						"mcc,none": -0.12239915757301645,
						"mcc_stderr,none": 0.030710327698406543
					},
					"hellaswag": {
						"acc,none": 0.45050786695877315,
						"acc_norm,none": 0.5902210714997013,
						"acc_norm_stderr,none": 0.004907877144720017,
						"acc_stderr,none": 0.004965276587781615,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.1695928385792665,
						"acc_norm,none": 0.1695928385792665,
						"acc_norm_stderr,none": 0.03964372583432484,
						"acc_stderr,none": 0.03964372583432484,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.038612291966536955,
						"acc_stderr,none": 0.038612291966536955,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.156,
						"acc_norm,none": 0.156,
						"acc_norm_stderr,none": 0.01148023500612237,
						"acc_stderr,none": 0.01148023500612237,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.0119122164562646,
						"acc_stderr,none": 0.0119122164562646,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.012997843819031817,
						"acc_stderr,none": 0.012997843819031817,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18833333333333332,
						"acc_norm,none": 0.18833333333333332,
						"acc_norm_stderr,none": 0.015974932830731804,
						"acc_stderr,none": 0.015974932830731804,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.141,
						"acc_norm,none": 0.141,
						"acc_norm_stderr,none": 0.011010914595992436,
						"acc_stderr,none": 0.011010914595992436,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753656,
						"acc_stderr,none": 0.008583336977753656,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.010463483381956722,
						"acc_stderr,none": 0.010463483381956722,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.009820001651345684,
						"acc_stderr,none": 0.009820001651345684,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2923076923076923,
						"acc_norm,none": 0.2923076923076923,
						"acc_norm_stderr,none": 0.04004492683616139,
						"acc_stderr,none": 0.04004492683616139,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.176,
						"acc_norm,none": 0.176,
						"acc_norm_stderr,none": 0.012048616898597517,
						"acc_stderr,none": 0.012048616898597517,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528033,
						"acc_stderr,none": 0.010709373963528033,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595296,
						"acc_stderr,none": 0.013063179040595296,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.167,
						"acc_norm,none": 0.167,
						"acc_norm_stderr,none": 0.011800434324644586,
						"acc_stderr,none": 0.011800434324644586,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.178,
						"acc_norm,none": 0.178,
						"acc_norm_stderr,none": 0.012102167676183577,
						"acc_stderr,none": 0.012102167676183577,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.131,
						"acc_norm,none": 0.131,
						"acc_norm_stderr,none": 0.010674874844837954,
						"acc_stderr,none": 0.010674874844837954,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.192,
						"acc_norm,none": 0.192,
						"acc_norm_stderr,none": 0.012461592646659985,
						"acc_stderr,none": 0.012461592646659985,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.17,
						"acc_norm,none": 0.17,
						"acc_norm_stderr,none": 0.011884495834541667,
						"acc_stderr,none": 0.011884495834541667,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909283,
						"acc_stderr,none": 0.04292346959909283,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.122,
						"acc_norm,none": 0.122,
						"acc_norm_stderr,none": 0.010354864712936705,
						"acc_stderr,none": 0.010354864712936705,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528024,
						"acc_stderr,none": 0.010709373963528024,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528005,
						"acc_stderr,none": 0.010709373963528005,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.228,
						"acc_norm,none": 0.228,
						"acc_norm_stderr,none": 0.013273740700804488,
						"acc_stderr,none": 0.013273740700804488,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.155,
						"acc_norm,none": 0.155,
						"acc_norm_stderr,none": 0.011450157470799477,
						"acc_stderr,none": 0.011450157470799477,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.214,
						"acc_norm,none": 0.214,
						"acc_norm_stderr,none": 0.012975838021968769,
						"acc_stderr,none": 0.012975838021968769,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.165,
						"acc_norm,none": 0.165,
						"acc_norm_stderr,none": 0.015166026086137761,
						"acc_stderr,none": 0.015166026086137761,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.158,
						"acc_norm,none": 0.158,
						"acc_norm_stderr,none": 0.011539894677559562,
						"acc_stderr,none": 0.011539894677559562,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.183,
						"acc_norm,none": 0.183,
						"acc_norm_stderr,none": 0.012233587399477825,
						"acc_stderr,none": 0.012233587399477825,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.011715000693181309,
						"acc_stderr,none": 0.011715000693181309,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.146,
						"acc_norm,none": 0.146,
						"acc_norm_stderr,none": 0.011171786285496496,
						"acc_stderr,none": 0.011171786285496496,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.253,
						"acc_norm,none": 0.253,
						"acc_norm_stderr,none": 0.01375427861358708,
						"acc_stderr,none": 0.01375427861358708,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.149,
						"acc_norm,none": 0.149,
						"acc_norm_stderr,none": 0.01126614068463216,
						"acc_stderr,none": 0.01126614068463216,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.012411851354816325,
						"acc_stderr,none": 0.012411851354816325,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.026935153843310702,
						"acc_stderr,none": 0.026935153843310702,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707371,
						"acc_stderr,none": 0.012559527926707371,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.01228519132638669,
						"acc_stderr,none": 0.01228519132638669,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.028355248200333395,
						"acc_stderr,none": 0.028355248200333395,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.128,
						"acc_norm,none": 0.128,
						"acc_norm_stderr,none": 0.010570133761108658,
						"acc_stderr,none": 0.010570133761108658,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48629686472264855,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.0004981082164328657,
						"acc_stderr,none": 0.04143813534441999,
						"alias": "kobest",
						"f1,none": 0.3983926658136865,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5049857549857549,
						"acc_stderr,none": 0.013348103841229714,
						"alias": " - kobest_boolq",
						"f1,none": 0.3683482791054158,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.01581547119529268,
						"alias": " - kobest_copa",
						"f1,none": 0.5105884048484776,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.346,
						"acc_norm,none": 0.462,
						"acc_norm_stderr,none": 0.022318338119870523,
						"acc_stderr,none": 0.021294951277234634,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3414288160921065,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5289672544080605,
						"acc_stderr,none": 0.025083743486632528,
						"alias": " - kobest_sentineg",
						"f1,none": 0.517191818684356,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6024645837376286,
						"acc_stderr,none": 0.02198681035743871,
						"alias": "lambada",
						"perplexity,none": 6.256157149635965,
						"perplexity_stderr,none": 0.6244302065513897
					},
					"lambada_cloze": {
						"acc,none": 0.052202600426935766,
						"acc_stderr,none": 0.003465720325789709,
						"alias": "lambada_cloze",
						"perplexity,none": 496.02774763563355,
						"perplexity_stderr,none": 87.80027255680362
					},
					"lambada_multilingual": {
						"acc,none": 0.4036095478362119,
						"acc_stderr,none": 0.09299638529776051,
						"alias": "lambada_multilingual",
						"perplexity,none": 73.05043593095773,
						"perplexity_stderr,none": 26.921658499115107
					},
					"lambada_openai": {
						"acc,none": 0.6448670677275373,
						"acc_stderr,none": 0.006667187146789756,
						"alias": " - lambada_openai",
						"perplexity,none": 5.047479080179978,
						"perplexity_stderr,none": 0.12068885557779292
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.04909761304094702,
						"acc_stderr,none": 0.0030103031355499157,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 324.55931672049496,
						"perplexity_stderr,none": 11.066176302063816
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3081699980593829,
						"acc_stderr,none": 0.006432902165497003,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 94.77398438873439,
						"perplexity_stderr,none": 5.715256779769182
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6442848825926645,
						"acc_stderr,none": 0.006669637106573884,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.048684325928006,
						"perplexity_stderr,none": 0.12083599189650227
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.33378614399379003,
						"acc_stderr,none": 0.0065698137161900386,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 96.04710530426931,
						"perplexity_stderr,none": 5.526979453565192
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.3844362507277314,
						"acc_stderr,none": 0.006777363157541737,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 67.15163245862558,
						"perplexity_stderr,none": 3.8786865446100562
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.34737046380749076,
						"acc_stderr,none": 0.0066334869563926665,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 102.23077317723134,
						"perplexity_stderr,none": 6.292675853188398
					},
					"lambada_standard": {
						"acc,none": 0.5606442848825927,
						"acc_stderr,none": 0.006914549858799189,
						"alias": " - lambada_standard",
						"perplexity,none": 7.462314717231179,
						"perplexity_stderr,none": 0.19429066697646374
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.05530758781292451,
						"acc_stderr,none": 0.0031845615350719844,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 667.4961785507721,
						"perplexity_stderr,none": 24.356661415177726
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.22010178117048346,
						"exact_match_stderr,get-answer": 0.010453040914078901
					},
					"logiqa": {
						"acc,none": 0.20430107526881722,
						"acc_norm,none": 0.2764976958525346,
						"acc_norm_stderr,none": 0.017543209075825184,
						"acc_stderr,none": 0.015814411436934697,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2321882951653944,
						"acc_norm,none": 0.2697201017811705,
						"acc_norm_stderr,none": 0.011197298041713571,
						"acc_stderr,none": 0.010652693534388778,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25661641541038527,
						"acc_norm,none": 0.25326633165829143,
						"acc_norm_stderr,none": 0.007961083648018718,
						"acc_stderr,none": 0.007995567445627896,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4241686083456895,
						"acc_stderr,none": 0.005086369797857499,
						"alias": "mc_taco",
						"f1,none": 0.47776390356353854,
						"f1_stderr,none": 0.006055221444365892
					},
					"medmcqa": {
						"acc,none": 0.2338034903179536,
						"acc_norm,none": 0.2338034903179536,
						"acc_norm_stderr,none": 0.006544908291307753,
						"acc_stderr,none": 0.006544908291307753,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2356637863315004,
						"acc_norm,none": 0.2356637863315004,
						"acc_norm_stderr,none": 0.01189994867277274,
						"acc_stderr,none": 0.01189994867277274,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24675972083748757,
						"acc_stderr,none": 0.040959899913954365,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542127,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.03885004245800253,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.0378272898086547,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526066,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.02461829819586651,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.16,
						"acc_stderr,none": 0.03684529491774709,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.0416333199893227,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.1907514450867052,
						"acc_stderr,none": 0.029957851329869337,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.04220773659171453,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2425531914893617,
						"acc_stderr,none": 0.028020226271200217,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.20175438596491227,
						"acc_stderr,none": 0.03775205013583638,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.18620689655172415,
						"acc_stderr,none": 0.03243946159004615,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2830687830687831,
						"acc_stderr,none": 0.023201392938194974,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.03670066451047181,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.04824181513244218,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2645161290322581,
						"acc_stderr,none": 0.025091892378859275,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.04760952285695235,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.20606060606060606,
						"acc_stderr,none": 0.03158415324047709,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.21717171717171718,
						"acc_stderr,none": 0.029376616484945627,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.20725388601036268,
						"acc_stderr,none": 0.029252823291803644,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2564102564102564,
						"acc_stderr,none": 0.02213908110397153,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.27037037037037037,
						"acc_stderr,none": 0.027080372815145654,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.19327731092436976,
						"acc_stderr,none": 0.025649470265889193,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.24503311258278146,
						"acc_stderr,none": 0.03511807571804724,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23302752293577983,
						"acc_stderr,none": 0.018125669180861493,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.14351851851851852,
						"acc_stderr,none": 0.02391077925264438,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.23039215686274508,
						"acc_stderr,none": 0.029554292605695042,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.33755274261603374,
						"acc_stderr,none": 0.030781549102026223,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2242152466367713,
						"acc_stderr,none": 0.027991534258519513,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847836,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2588735387885229,
						"acc_stderr,none": 0.033027905215969273,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2975206611570248,
						"acc_stderr,none": 0.04173349148083498,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04109974682633932,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.14563106796116504,
						"acc_stderr,none": 0.034926064766237906,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2606837606837607,
						"acc_stderr,none": 0.028760348956523414,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542128,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.23116219667943805,
						"acc_stderr,none": 0.015075523238101083,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.023618678310069363,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2446927374301676,
						"acc_stderr,none": 0.014378169884098414,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.238562091503268,
						"acc_stderr,none": 0.02440439492808787,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2349533311876408,
						"acc_stderr,none": 0.04520996219916423,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2733118971061093,
						"acc_stderr,none": 0.02531176597542612,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.02409347123262133,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24822695035460993,
						"acc_stderr,none": 0.02577001564429038,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25554106910039115,
						"acc_stderr,none": 0.011139857833598516,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.21691176470588236,
						"acc_stderr,none": 0.025035845227711264,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2434640522875817,
						"acc_stderr,none": 0.017362473762146627,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.22727272727272727,
						"acc_stderr,none": 0.04013964554072773,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19183673469387755,
						"acc_stderr,none": 0.025206963154225413,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2333441663958401,
						"acc_stderr,none": 0.03296316439382121,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.26865671641791045,
						"acc_stderr,none": 0.03134328358208954,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2534094513162069,
						"acc_stderr,none": 0.050273874905870146,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.04512608598542126,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2289156626506024,
						"acc_stderr,none": 0.03270745277352477,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3157894736842105,
						"acc_stderr,none": 0.035650796707083106,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.35272542027508913,
						"acc_stderr,none": 0.004823248397460998,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3509967453213995,
						"acc_stderr,none": 0.004813668005153421,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5416666666666666,
						"acc_stderr,none": 0.02469787409849239,
						"alias": "mrpc",
						"f1,none": 0.6618444846292948,
						"f1_stderr,none": 0.023334820124160573
					},
					"multimedqa": {
						"acc,none": 0.26132008516678495,
						"acc_norm,none": 0.23279417315737264,
						"acc_norm_stderr,none": 7.885611105649266e-05,
						"acc_stderr,none": 0.10154046895873245,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5521864686468647,
						"acc_stderr,none": 0.007142577745727249,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6565086549951046,
						"mrr_stderr,none": 0.01036173201159806,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.435665914221219,
						"r@2_stderr,none": 0.016667608744233644
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6234951110443196,
						"mrr_stderr,none": 0.010318210087039177,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4751693002257336,
						"r@2_stderr,none": 0.01678657778150066
					},
					"openbookqa": {
						"acc,none": 0.258,
						"acc_norm,none": 0.366,
						"acc_norm_stderr,none": 0.021564276850201618,
						"acc_stderr,none": 0.019586711785215837,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.422,
						"acc_stderr,none": 0.011046221503516777,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4245,
						"acc_stderr,none": 0.011054907529701138,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4425,
						"acc_stderr,none": 0.01110894141174761,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5275,
						"acc_stderr,none": 0.01116620871686354,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.559,
						"acc_stderr,none": 0.011105006104468736,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5035,
						"acc_stderr,none": 0.011182862030875627,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4775,
						"acc_stderr,none": 0.01117180735780118,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4795,
						"acc_stderr,none": 0.03837052780781795,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7415669205658324,
						"acc_norm,none": 0.7388465723612623,
						"acc_norm_stderr,none": 0.01024873864993557,
						"acc_stderr,none": 0.010213971636773336,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2411400512382579,
						"acc_norm,none": 0.28239752348420155,
						"acc_norm_stderr,none": 0.0032888612930581725,
						"acc_stderr,none": 0.0031252779064217706,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.65,
						"acc_stderr,none": 0.021352091786223104,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7069152363601823,
						"acc_norm,none": 0.5231949841923784,
						"acc_norm_stderr,none": 0.010219545017061498,
						"acc_stderr,none": 0.13640941378521113,
						"alias": "pythia",
						"bits_per_byte,none": 0.7037479522024591,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6287305519242088,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.047479080179978,
						"perplexity_stderr,none": 0.12068885557779292,
						"word_perplexity,none": 13.578549189135115,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.30319148936170215,
						"acc_norm,none": 0.3882978723404255,
						"acc_norm_stderr,none": 0.05027948481310724,
						"acc_stderr,none": 0.0422577598480595,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.35,
						"acc_norm,none": 0.475,
						"acc_norm_stderr,none": 0.04577759534198058,
						"acc_stderr,none": 0.043723731609760265,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.25,
						"acc_norm,none": 0.3625,
						"acc_norm_stderr,none": 0.038123743406448904,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.31338028169014087,
						"acc_norm,none": 0.36619718309859156,
						"acc_norm_stderr,none": 0.02863791293383347,
						"acc_stderr,none": 0.027574062217983555,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5229727256086399,
						"acc_stderr,none": 0.006758265940880552,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5183774424931982,
						"acc_stderr,none": 0.002485020395273573,
						"alias": "qqp",
						"f1,none": 0.2219913696659741,
						"f1_stderr,none": 0.0035040778927957956
					},
					"race": {
						"acc,none": 0.3406698564593301,
						"acc_stderr,none": 0.014667904380876562,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.48014440433212996,
						"acc_stderr,none": 0.0300727231673172,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.871,
						"acc_norm,none": 0.828,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.010605256784796584,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.48014440433212996,
						"acc_stderr,none": 0.0300727231673172,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.49655963302752293,
						"acc_stderr,none": 0.016941452632752724,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5288913326002199,
						"acc_norm,none": 0.720333899830051,
						"acc_norm_stderr,none": 0.003173346986465535,
						"acc_stderr,none": 0.00352918556572768,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5520614954577219,
						"acc_stderr,none": 0.03344713688963425,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5093149038461539,
						"acc_stderr,none": 0.00500338693827354,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6375798114928549,
						"acc_stderr,none": 0.004839526963605133,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5111764705882353,
						"acc_stderr,none": 0.0049497433728747445,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.28209200611068425,
						"acc_stderr,none": 0.0014320003893629608,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.015764770836777308,
						"bleu_diff,none": -7.540965232122667,
						"bleu_diff_stderr,none": 0.708780399154416,
						"bleu_max,none": 22.21115890836555,
						"bleu_max_stderr,none": 0.6911388759354934,
						"rouge1_acc,none": 0.2594859241126071,
						"rouge1_acc_stderr,none": 0.015345409485557994,
						"rouge1_diff,none": -10.223385023753384,
						"rouge1_diff_stderr,none": 0.7949966578940945,
						"rouge1_max,none": 47.10118109447428,
						"rouge1_max_stderr,none": 0.8454421627090442,
						"rouge2_acc,none": 0.18237454100367198,
						"rouge2_acc_stderr,none": 0.013518055636187212,
						"rouge2_diff,none": -12.44433338431279,
						"rouge2_diff_stderr,none": 0.9074252477600858,
						"rouge2_max,none": 29.80648575141351,
						"rouge2_max_stderr,none": 0.9368742186398287,
						"rougeL_acc,none": 0.23378212974296206,
						"rougeL_acc_stderr,none": 0.014816195991931591,
						"rougeL_diff,none": -10.398652019578794,
						"rougeL_diff_stderr,none": 0.7925931835498271,
						"rougeL_max,none": 44.07569039028453,
						"rougeL_max_stderr,none": 0.8527266622513323
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.015764770836777308,
						"bleu_diff,none": -7.540965232122667,
						"bleu_diff_stderr,none": 0.708780399154416,
						"bleu_max,none": 22.21115890836555,
						"bleu_max_stderr,none": 0.6911388759354934,
						"rouge1_acc,none": 0.2594859241126071,
						"rouge1_acc_stderr,none": 0.015345409485557994,
						"rouge1_diff,none": -10.223385023753384,
						"rouge1_diff_stderr,none": 0.7949966578940945,
						"rouge1_max,none": 47.10118109447428,
						"rouge1_max_stderr,none": 0.8454421627090442,
						"rouge2_acc,none": 0.18237454100367198,
						"rouge2_acc_stderr,none": 0.013518055636187212,
						"rouge2_diff,none": -12.44433338431279,
						"rouge2_diff_stderr,none": 0.9074252477600858,
						"rouge2_max,none": 29.80648575141351,
						"rouge2_max_stderr,none": 0.9368742186398287,
						"rougeL_acc,none": 0.23378212974296206,
						"rougeL_acc_stderr,none": 0.014816195991931591,
						"rougeL_diff,none": -10.398652019578794,
						"rougeL_diff_stderr,none": 0.7925931835498271,
						"rougeL_max,none": 44.07569039028453,
						"rougeL_max_stderr,none": 0.8527266622513323
					},
					"truthfulqa_mc1": {
						"acc,none": 0.211750305997552,
						"acc_stderr,none": 0.01430206835392561,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35243370622381653,
						"acc_stderr,none": 0.013571930574793346,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04625984251968504,
						"exact_match_stderr,none": 0.004660818035610495
					},
					"wic": {
						"acc,none": 0.49686520376175547,
						"acc_stderr,none": 0.01981033193209754,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7037586072287029,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6287425809604,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 13.57908546557181,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6132596685082873,
						"acc_stderr,none": 0.01368721476188304,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.05975550263548289,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.34615384615384615,
						"acc_stderr,none": 0.04687634642174987,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.73992673992674,
						"acc_stderr,none": 0.02659853762760147,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5441818181818182,
						"acc_stderr,none": 0.031389855237621694,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.022352791650914156,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928028,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.022242244375731017,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.572,
						"acc_stderr,none": 0.02214979066386193,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.568,
						"acc_stderr,none": 0.022175109265613162,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.57,
						"acc_stderr,none": 0.022162634426652835,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.562,
						"acc_stderr,none": 0.022210326363977417,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.41563587684069614,
						"acc_stderr,none": 0.044093749051022736,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512706,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.41485943775100403,
						"acc_stderr,none": 0.009875705744164676,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4562248995983936,
						"acc_stderr,none": 0.009983589197693925,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.41526104417670684,
						"acc_stderr,none": 0.009877093420328584,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5417670682730924,
						"acc_stderr,none": 0.009987044882812574,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.42369477911646586,
						"acc_stderr,none": 0.009904678540828906,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.45823293172690766,
						"acc_stderr,none": 0.009987044882812574,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.39196787148594375,
						"acc_stderr,none": 0.009785342947722884,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4461847389558233,
						"acc_stderr,none": 0.009963854274139159,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3746987951807229,
						"acc_stderr,none": 0.009702269475407222,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432365,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3899598393574297,
						"acc_stderr,none": 0.00977634921819301,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3457831325301205,
						"acc_stderr,none": 0.009533455033752766,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.43614457831325304,
						"acc_stderr,none": 0.009940006562498604,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.38473895582329315,
						"acc_stderr,none": 0.00975214930715252,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.542205643463089,
						"acc_stderr,none": 0.05184065235877343,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5056254136333554,
						"acc_stderr,none": 0.01286631092307251,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.700860357379219,
						"acc_stderr,none": 0.011783227411626315,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5890138980807412,
						"acc_stderr,none": 0.01266157889436895,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5155526141628061,
						"acc_stderr,none": 0.01286089911147079,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5082726671078756,
						"acc_stderr,none": 0.012865364020375391,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5188616810059563,
						"acc_stderr,none": 0.012857966762464992,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4923891462607545,
						"acc_stderr,none": 0.012865634571114483,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.0128060889661224,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5023163467902052,
						"acc_stderr,none": 0.012866987239478045,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481964,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5393778954334878,
						"acc_stderr,none": 0.012827159238891913,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7244324567318499,
						"acc_stderr,none": 0.04981857414933562,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8154838709677419,
						"acc_stderr,none": 0.00804649588477837,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6385542168674698,
						"acc_stderr,none": 0.053053439348320096,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6037539103232534,
						"acc_stderr,none": 0.015802642616557245,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6768060836501901,
						"acc_stderr,none": 0.028894359362917902,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6126984126984127,
						"acc_stderr,none": 0.027490535011305765,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6428571428571429,
						"acc_stderr,none": 0.02136457356112441,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "state-spaces/mamba-1.4b-hf"
	},
	"state-spaces/mamba-2.8b-hf": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5786358511837654,
						"acc_norm,none": 0.5493235625704622,
						"acc_norm_stderr,none": 0.08816069398587968,
						"acc_stderr,none": 0.1137888542922427,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.33875,
						"acc_stderr,none": 0.01882511845309368,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.32275,
						"acc_stderr,none": 0.24260179121937714,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8186268656716418,
						"acc_stderr,none": 0.1611525092460901,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.23699851411589895,
						"acc_norm,none": 0.23699851411589895,
						"acc_norm_stderr,none": 0.11076497549031067,
						"acc_stderr,none": 0.11076497549031067,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2525470557762046,
						"acc_norm,none": 0.2525470557762046,
						"acc_norm_stderr,none": 0.03671097126464304,
						"acc_stderr,none": 0.03671097126464304,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6487133615274265,
						"likelihood_diff_stderr,none": 0.44717347330858914,
						"pct_stereotype,none": 0.6137447823494335,
						"pct_stereotype_stderr,none": 0.08228066143842372
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0359251968503937,
						"exact_match_stderr,none": 0.004129523888282568
					},
					"glue": {
						"acc,none": 0.4634498570747975,
						"acc_stderr,none": 0.003239245623728341,
						"alias": "glue",
						"f1,none": 0.29779959292414865,
						"f1_stderr,none": 0.0002793132294303884,
						"mcc,none": -0.009366346179206609,
						"mcc_stderr,none": 0.030597643630042515
					},
					"kmmlu": {
						"acc,none": 0.29688131677736074,
						"acc_norm,none": 0.29688131677736074,
						"acc_norm_stderr,none": 0.03099161898661731,
						"acc_stderr,none": 0.03099161898661731,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.49660162245121686,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.03848474421490906,
						"alias": "kobest",
						"f1,none": 0.404458388962019,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6445759751601009,
						"acc_stderr,none": 0.019553217069408043,
						"alias": "lambada",
						"perplexity,none": 4.939995648569303,
						"perplexity_stderr,none": 0.384096274513576
					},
					"lambada_cloze": {
						"acc,none": 0.015913060353192315,
						"acc_stderr,none": 0.0034757626126060174,
						"alias": "lambada_cloze",
						"perplexity,none": 550.3879549032538,
						"perplexity_stderr,none": 19.364961419922356
					},
					"lambada_multilingual": {
						"acc,none": 0.43524160683097224,
						"acc_stderr,none": 0.08914399127428199,
						"alias": "lambada_multilingual",
						"perplexity,none": 53.74574184344065,
						"perplexity_stderr,none": 19.125461313064765
					},
					"mmlu": {
						"acc,none": 0.2570858852015382,
						"acc_stderr,none": 0.03735937843684266,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24952178533475028,
						"acc_stderr,none": 0.029834088500318552,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2745413582233666,
						"acc_stderr,none": 0.041946549375474165,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24244393890152746,
						"acc_stderr,none": 0.032338006087189786,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.26546146527117026,
						"acc_stderr,none": 0.04232111490669083,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.29737402413058905,
						"acc_norm,none": 0.2647179445941056,
						"acc_norm_stderr,none": 8.498507738732542e-05,
						"acc_stderr,none": 0.11338523752503174,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4355,
						"acc_stderr,none": 0.05530290478323628,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7111206938271635,
						"acc_norm,none": 0.5551597643109178,
						"acc_norm_stderr,none": 0.009956089462458566,
						"acc_stderr,none": 0.1524940397519855,
						"alias": "pythia",
						"bits_per_byte,none": 0.6655260472001193,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5861465214371127,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.2109339571254605,
						"perplexity_stderr,none": 0.09447809444003981,
						"word_perplexity,none": 11.784900611347632,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.41843971631205673,
						"acc_norm_stderr,none": 0.06222883032359386,
						"acc_stderr,none": 0.047898574390541536,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5490000332767628,
						"acc_stderr,none": 0.022098657972730456,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.28899459953341194,
						"acc_stderr,none": 0.001417793278876567,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811392,
						"bleu_diff,none": -7.712989173818157,
						"bleu_diff_stderr,none": 0.7761644940195728,
						"bleu_max,none": 24.119256687901157,
						"bleu_max_stderr,none": 0.7545894176850129,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522502,
						"rouge1_diff,none": -10.499797833766538,
						"rouge1_diff_stderr,none": 0.7998946584915853,
						"rouge1_max,none": 49.16599266502819,
						"rouge1_max_stderr,none": 0.86748579893984,
						"rouge2_acc,none": 0.22399020807833536,
						"rouge2_acc_stderr,none": 0.014594964329474207,
						"rouge2_diff,none": -12.087376053924952,
						"rouge2_diff_stderr,none": 0.9838091389086047,
						"rouge2_max,none": 32.82913940889599,
						"rouge2_max_stderr,none": 0.9770875656132315,
						"rougeL_acc,none": 0.2521419828641371,
						"rougeL_acc_stderr,none": 0.015201522246299969,
						"rougeL_diff,none": -10.93455550712026,
						"rougeL_diff_stderr,none": 0.8131722241769852,
						"rougeL_max,none": 46.06771389346665,
						"rougeL_max_stderr,none": 0.884616523271091
					},
					"xcopa": {
						"acc,none": 0.5529090909090909,
						"acc_stderr,none": 0.03557018585454539,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.4210441767068273,
						"acc_stderr,none": 0.04877213923480642,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5632633415558631,
						"acc_stderr,none": 0.057373890886261665,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7561249719037986,
						"acc_stderr,none": 0.046369938156017504,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5786358511837654,
						"acc_norm,none": 0.5493235625704622,
						"acc_norm_stderr,none": 0.08816069398587968,
						"acc_stderr,none": 0.1137888542922427,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.33875,
						"acc_stderr,none": 0.01882511845309368,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.316,
						"acc_stderr,none": 0.014709193056057134,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932579,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36083333333333334,
						"acc_stderr,none": 0.013869180252444864,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3378839590443686,
						"acc_norm,none": 0.363481228668942,
						"acc_norm_stderr,none": 0.014056207319068282,
						"acc_stderr,none": 0.013822047922283514,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6973905723905723,
						"acc_norm,none": 0.640993265993266,
						"acc_norm_stderr,none": 0.009843424713072174,
						"acc_stderr,none": 0.009426434542371222,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.32275,
						"acc_stderr,none": 0.24260179121937714,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.235,
						"acc_stderr,none": 0.009483272970505337,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.004920529016569339,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.4805,
						"acc_stderr,none": 0.011174628009718267,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.5115,
						"acc_stderr,none": 0.011180177690296078,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.4255,
						"acc_stderr,none": 0.01105830086446394,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.4255,
						"acc_stderr,none": 0.01105830086446394,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.078,
						"acc_stderr,none": 0.0059979986657215,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.1035,
						"acc_stderr,none": 0.006813008406113376,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.01,
						"acc_stderr,none": 0.002225415969682749,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.009,
						"acc_stderr,none": 0.002112280962711329,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.013015184381778741,
						"acc_stderr,none": 0.0023612347211882253,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8186268656716418,
						"acc_stderr,none": 0.1611525092460901,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.00936368937324811,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448847,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437794,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.012772554096113123,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.00936368937324813,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.769,
						"acc_stderr,none": 0.013334797216936442,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920676,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.01323250161908534,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.869,
						"acc_stderr,none": 0.010674874844837957,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.00263779414624378,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.979,
						"acc_stderr,none": 0.0045364721513065365,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.953,
						"acc_stderr,none": 0.006695956678163039,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817149,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706822,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696837,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.905,
						"acc_stderr,none": 0.00927691010310331,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946568,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.863,
						"acc_stderr,none": 0.01087884871433333,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.75,
						"acc_stderr,none": 0.013699915608779773,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.81,
						"acc_stderr,none": 0.012411851354816329,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.012702651587655144,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.893,
						"acc_stderr,none": 0.009779910359847165,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698451,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.156,
						"acc_stderr,none": 0.011480235006122368,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651528,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.787,
						"acc_stderr,none": 0.012953717566737228,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.713,
						"acc_stderr,none": 0.014312087053809963,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423525,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.009099549538400246,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286417,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745923,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.762,
						"acc_stderr,none": 0.013473586661967213,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.010534798620855768,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.511,
						"acc_stderr,none": 0.01581547119529269,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.609,
						"acc_stderr,none": 0.015438826294681789,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.01475865230357489,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.01272607374459827,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.597,
						"acc_stderr,none": 0.015518757419066538,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340966,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.009188875634996672,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.708,
						"acc_stderr,none": 0.014385511563477343,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844878,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386702,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.712,
						"acc_stderr,none": 0.01432694179723156,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.415,
						"acc_stderr,none": 0.015589035185604628,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571418,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240674,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.003717232548256598,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.01335493745228156,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.01557798682993653,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662734,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.903,
						"acc_stderr,none": 0.009363689373248111,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.0144568322948011,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.854,
						"acc_stderr,none": 0.011171786285496497,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.876,
						"acc_stderr,none": 0.010427498872343973,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.753,
						"acc_stderr,none": 0.013644675781314133,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613621,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240653,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695806,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427418,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666662,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.431,
						"acc_stderr,none": 0.015667944488173487,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.328,
						"acc_stderr,none": 0.014853842487270334,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6535168195718655,
						"acc_stderr,none": 0.008322657125583485,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.5535714285714286,
						"acc_stderr,none": 0.06703189227942395,
						"alias": "cb",
						"f1,none": 0.3862433862433862,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.23699851411589895,
						"acc_norm,none": 0.23699851411589895,
						"acc_norm_stderr,none": 0.11076497549031067,
						"acc_stderr,none": 0.11076497549031067,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.1702127659574468,
						"acc_norm,none": 0.1702127659574468,
						"acc_norm_stderr,none": 0.055411578656325386,
						"acc_stderr,none": 0.055411578656325386,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.05781449705557244,
						"acc_stderr,none": 0.05781449705557244,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.16216216216216217,
						"acc_norm,none": 0.16216216216216217,
						"acc_norm_stderr,none": 0.06143325088732367,
						"acc_stderr,none": 0.06143325088732367,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.32432432432432434,
						"acc_norm,none": 0.32432432432432434,
						"acc_norm_stderr,none": 0.07802030664724673,
						"acc_stderr,none": 0.07802030664724673,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777509,
						"acc_stderr,none": 0.06120537406777509,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.1008316903303367,
						"acc_stderr,none": 0.1008316903303367,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.08695652173913043,
						"acc_norm,none": 0.08695652173913043,
						"acc_norm_stderr,none": 0.060073850409370216,
						"acc_stderr,none": 0.060073850409370216,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.07488677009526491,
						"acc_stderr,none": 0.07488677009526491,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.14285714285714285,
						"acc_norm,none": 0.14285714285714285,
						"acc_norm_stderr,none": 0.07824607964359517,
						"acc_stderr,none": 0.07824607964359517,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.32653061224489793,
						"acc_norm,none": 0.32653061224489793,
						"acc_norm_stderr,none": 0.06768622021133469,
						"acc_stderr,none": 0.06768622021133469,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.10497277621629558,
						"acc_stderr,none": 0.10497277621629558,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.09038769075777339,
						"acc_stderr,none": 0.09038769075777339,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.05817221556628254,
						"acc_stderr,none": 0.05817221556628254,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.06390760676613884,
						"acc_stderr,none": 0.06390760676613884,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.06545849153992007,
						"acc_stderr,none": 0.06545849153992007,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996391,
						"acc_stderr,none": 0.08081046758996391,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2525470557762046,
						"acc_norm,none": 0.2525470557762046,
						"acc_norm_stderr,none": 0.03671097126464304,
						"acc_stderr,none": 0.03671097126464304,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.034949590161775394,
						"acc_stderr,none": 0.034949590161775394,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816525,
						"acc_stderr,none": 0.03427743175816525,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.03016631629884799,
						"acc_stderr,none": 0.03016631629884799,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.2375,
						"acc_norm_stderr,none": 0.03374839851779222,
						"acc_stderr,none": 0.03374839851779222,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306086,
						"acc_stderr,none": 0.03807387116306086,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24458204334365324,
						"acc_norm,none": 0.24458204334365324,
						"acc_norm_stderr,none": 0.023953997540932175,
						"acc_stderr,none": 0.023953997540932175,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693247,
						"acc_stderr,none": 0.030778554678693247,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.2346368715083799,
						"acc_norm,none": 0.2346368715083799,
						"acc_norm_stderr,none": 0.03176302794175762,
						"acc_stderr,none": 0.03176302794175762,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.02830465794303529,
						"acc_stderr,none": 0.02830465794303529,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.20754716981132076,
						"acc_norm,none": 0.20754716981132076,
						"acc_norm_stderr,none": 0.039577692383779346,
						"acc_stderr,none": 0.039577692383779346,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.32075471698113206,
						"acc_norm,none": 0.32075471698113206,
						"acc_norm_stderr,none": 0.04555176317903525,
						"acc_stderr,none": 0.04555176317903525,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.03957835471980982,
						"acc_stderr,none": 0.03957835471980982,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2641509433962264,
						"acc_norm,none": 0.2641509433962264,
						"acc_norm_stderr,none": 0.043025487739590106,
						"acc_stderr,none": 0.043025487739590106,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.24175824175824176,
						"acc_norm,none": 0.24175824175824176,
						"acc_norm_stderr,none": 0.025960319996852693,
						"acc_stderr,none": 0.025960319996852693,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693257,
						"acc_stderr,none": 0.030778554678693257,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2589928057553957,
						"acc_norm,none": 0.2589928057553957,
						"acc_norm_stderr,none": 0.03729198658164233,
						"acc_stderr,none": 0.03729198658164233,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.26380368098159507,
						"acc_norm,none": 0.26380368098159507,
						"acc_norm_stderr,none": 0.034624199316156234,
						"acc_stderr,none": 0.034624199316156234,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707745,
						"acc_stderr,none": 0.03285260554707745,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.26587301587301587,
						"acc_norm,none": 0.26587301587301587,
						"acc_norm_stderr,none": 0.02788597694851165,
						"acc_stderr,none": 0.02788597694851165,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.030532892233932032,
						"acc_stderr,none": 0.030532892233932032,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2815126050420168,
						"acc_norm,none": 0.2815126050420168,
						"acc_norm_stderr,none": 0.02921354941437216,
						"acc_stderr,none": 0.02921354941437216,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.02961409422163373,
						"acc_stderr,none": 0.02961409422163373,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23295454545454544,
						"acc_norm,none": 0.23295454545454544,
						"acc_norm_stderr,none": 0.031954139030501774,
						"acc_stderr,none": 0.031954139030501774,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.03551344041697431,
						"acc_stderr,none": 0.03551344041697431,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.0374425492857706,
						"acc_stderr,none": 0.0374425492857706,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2711864406779661,
						"acc_norm,none": 0.2711864406779661,
						"acc_norm_stderr,none": 0.04110070549339208,
						"acc_stderr,none": 0.04110070549339208,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.27439024390243905,
						"acc_norm,none": 0.27439024390243905,
						"acc_norm_stderr,none": 0.03494959016177541,
						"acc_stderr,none": 0.03494959016177541,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.0357250214181557,
						"acc_stderr,none": 0.0357250214181557,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03162930395697951,
						"acc_stderr,none": 0.03162930395697951,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27906976744186046,
						"acc_norm,none": 0.27906976744186046,
						"acc_norm_stderr,none": 0.03430085607014882,
						"acc_stderr,none": 0.03430085607014882,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2570093457943925,
						"acc_norm,none": 0.2570093457943925,
						"acc_norm_stderr,none": 0.02994169153324464,
						"acc_stderr,none": 0.02994169153324464,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.039720129754505354,
						"acc_stderr,none": 0.039720129754505354,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798664,
						"acc_stderr,none": 0.03957756102798664,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24285714285714285,
						"acc_norm,none": 0.24285714285714285,
						"acc_norm_stderr,none": 0.029661370413965837,
						"acc_stderr,none": 0.029661370413965837,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2388888888888889,
						"acc_norm,none": 0.2388888888888889,
						"acc_norm_stderr,none": 0.03187098535605761,
						"acc_stderr,none": 0.03187098535605761,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.27586206896551724,
						"acc_norm,none": 0.27586206896551724,
						"acc_norm_stderr,none": 0.04167808180844153,
						"acc_stderr,none": 0.04167808180844153,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.25517241379310346,
						"acc_norm,none": 0.25517241379310346,
						"acc_norm_stderr,none": 0.03632984052707842,
						"acc_stderr,none": 0.03632984052707842,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.029737751726596828,
						"acc_stderr,none": 0.029737751726596828,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.02832451468417114,
						"acc_stderr,none": 0.02832451468417114,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.03279424038543969,
						"acc_stderr,none": 0.03279424038543969,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890805,
						"acc_stderr,none": 0.028952167450890805,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139405,
						"acc_stderr,none": 0.03374402644139405,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323176,
						"acc_stderr,none": 0.03307162750323176,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.19875776397515527,
						"acc_norm,none": 0.19875776397515527,
						"acc_norm_stderr,none": 0.031548882234038005,
						"acc_stderr,none": 0.031548882234038005,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.009366346179206609,
						"mcc_stderr,none": 0.030597643630042515
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.03684529491774711,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.6487133615274265,
						"likelihood_diff_stderr,none": 0.44717347330858914,
						"pct_stereotype,none": 0.6137447823494335,
						"pct_stereotype_stderr,none": 0.08228066143842372
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.561860350106682,
						"likelihood_diff_stderr,none": 0.08552947554051259,
						"pct_stereotype,none": 0.655933214072749,
						"pct_stereotype_stderr,none": 0.011604172587877418
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.8654859773405303,
						"likelihood_diff_stderr,none": 0.4021605444941542,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497396
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.810830549760298,
						"likelihood_diff_stderr,none": 1.3590360876527747,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.027229485144982,
						"likelihood_diff_stderr,none": 0.5938736583582105,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.7515016555786134,
						"likelihood_diff_stderr,none": 0.18190639782435664,
						"pct_stereotype,none": 0.66875,
						"pct_stereotype_stderr,none": 0.02635205567992741
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.5792474393491394,
						"likelihood_diff_stderr,none": 0.24286712088724136,
						"pct_stereotype,none": 0.5648148148148148,
						"pct_stereotype_stderr,none": 0.03381200005643525
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.5415503448910184,
						"likelihood_diff_stderr,none": 0.3427962512871603,
						"pct_stereotype,none": 0.8194444444444444,
						"pct_stereotype_stderr,none": 0.04564949854152483
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.2623563188267504,
						"likelihood_diff_stderr,none": 0.1402708143728044,
						"pct_stereotype,none": 0.5551181102362205,
						"pct_stereotype_stderr,none": 0.022070444592370703
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5325009114033468,
						"likelihood_diff_stderr,none": 0.3063320418715578,
						"pct_stereotype,none": 0.7837837837837838,
						"pct_stereotype_stderr,none": 0.039250566187156465
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.5967047906691025,
						"likelihood_diff_stderr,none": 0.4354638905424235,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.115864151402524,
						"likelihood_diff_stderr,none": 0.24708211744613112,
						"pct_stereotype,none": 0.6947368421052632,
						"pct_stereotype_stderr,none": 0.03349781342677419
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.744260447893956,
						"likelihood_diff_stderr,none": 0.08797472808052915,
						"pct_stereotype,none": 0.5718545020870602,
						"pct_stereotype_stderr,none": 0.012086525035273394
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.5559811062282987,
						"likelihood_diff_stderr,none": 0.3283249122238594,
						"pct_stereotype,none": 0.5444444444444444,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.16158206646259,
						"likelihood_diff_stderr,none": 0.8652988471929067,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.137011152325255,
						"likelihood_diff_stderr,none": 0.4780155160381258,
						"pct_stereotype,none": 0.7727272727272727,
						"pct_stereotype_stderr,none": 0.051979261354260516
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.0782772729701344,
						"likelihood_diff_stderr,none": 0.1563649301128781,
						"pct_stereotype,none": 0.5545171339563862,
						"pct_stereotype_stderr,none": 0.0277842065133898
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.987395599425546,
						"likelihood_diff_stderr,none": 0.21949591268298546,
						"pct_stereotype,none": 0.3557312252964427,
						"pct_stereotype_stderr,none": 0.030157425619935837
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.9211238755120172,
						"likelihood_diff_stderr,none": 0.5866538895200958,
						"pct_stereotype,none": 0.6666666666666666,
						"pct_stereotype_stderr,none": 0.05594542388644592
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.6088216906008515,
						"likelihood_diff_stderr,none": 0.1738112815910215,
						"pct_stereotype,none": 0.5173913043478261,
						"pct_stereotype_stderr,none": 0.023323879622363822
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.873919627977454,
						"likelihood_diff_stderr,none": 0.3578020770241259,
						"pct_stereotype,none": 0.7130434782608696,
						"pct_stereotype_stderr,none": 0.042365626207479204
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.221205721844684,
						"likelihood_diff_stderr,none": 0.45417195437101754,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.041084468550358806
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.0313538726495235,
						"likelihood_diff_stderr,none": 0.2515452351463613,
						"pct_stereotype,none": 0.6989795918367347,
						"pct_stereotype_stderr,none": 0.032848301055273386
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.0359251968503937,
						"exact_match_stderr,none": 0.004129523888282568
					},
					"glue": {
						"acc,none": 0.4634498570747975,
						"acc_stderr,none": 0.003239245623728341,
						"alias": "glue",
						"f1,none": 0.29779959292414865,
						"f1_stderr,none": 0.0002793132294303884,
						"mcc,none": -0.009366346179206609,
						"mcc_stderr,none": 0.030597643630042515
					},
					"hellaswag": {
						"acc,none": 0.4940250946026688,
						"acc_norm,none": 0.658832901812388,
						"acc_norm_stderr,none": 0.004731324409133258,
						"acc_stderr,none": 0.004989425133377905,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.29688131677736074,
						"acc_norm,none": 0.29688131677736074,
						"acc_norm_stderr,none": 0.03099161898661731,
						"acc_stderr,none": 0.03099161898661731,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.324,
						"acc_norm,none": 0.324,
						"acc_norm_stderr,none": 0.014806864733738864,
						"acc_stderr,none": 0.014806864733738864,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087967,
						"acc_stderr,none": 0.014683991951087967,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965126,
						"acc_stderr,none": 0.013895037677965126,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.01435639599990569,
						"acc_stderr,none": 0.01435639599990569,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.01806848202433441,
						"acc_stderr,none": 0.01806848202433441,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.336,
						"acc_norm,none": 0.336,
						"acc_norm_stderr,none": 0.014944140233795023,
						"acc_stderr,none": 0.014944140233795023,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.01475865230357487,
						"acc_stderr,none": 0.01475865230357487,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.343,
						"acc_norm,none": 0.343,
						"acc_norm_stderr,none": 0.015019206922356955,
						"acc_stderr,none": 0.015019206922356955,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.245,
						"acc_norm,none": 0.245,
						"acc_norm_stderr,none": 0.03048807329211421,
						"acc_stderr,none": 0.03048807329211421,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.316,
						"acc_norm,none": 0.316,
						"acc_norm_stderr,none": 0.014709193056057128,
						"acc_stderr,none": 0.014709193056057128,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.23846153846153847,
						"acc_norm,none": 0.23846153846153847,
						"acc_norm_stderr,none": 0.03751977598816764,
						"acc_stderr,none": 0.03751977598816764,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.04560480215720684,
						"acc_stderr,none": 0.04560480215720684,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.323,
						"acc_norm,none": 0.323,
						"acc_norm_stderr,none": 0.01479492784334864,
						"acc_stderr,none": 0.01479492784334864,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.317,
						"acc_norm,none": 0.317,
						"acc_norm_stderr,none": 0.01472167543888022,
						"acc_stderr,none": 0.01472167543888022,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296188,
						"acc_stderr,none": 0.014341711358296188,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.326,
						"acc_norm,none": 0.326,
						"acc_norm_stderr,none": 0.014830507204541047,
						"acc_stderr,none": 0.014830507204541047,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091505,
						"acc_stderr,none": 0.014205696104091505,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.273,
						"acc_norm,none": 0.273,
						"acc_norm_stderr,none": 0.014095022868717595,
						"acc_stderr,none": 0.014095022868717595,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.325,
						"acc_norm,none": 0.325,
						"acc_norm_stderr,none": 0.014818724459095526,
						"acc_stderr,none": 0.014818724459095526,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.32,
						"acc_norm,none": 0.32,
						"acc_norm_stderr,none": 0.014758652303574883,
						"acc_stderr,none": 0.014758652303574883,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.04163331998932269,
						"acc_stderr,none": 0.04163331998932269,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.317,
						"acc_norm,none": 0.317,
						"acc_norm_stderr,none": 0.014721675438880208,
						"acc_stderr,none": 0.014721675438880208,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509001,
						"acc_stderr,none": 0.014658474370509001,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.312,
						"acc_norm,none": 0.312,
						"acc_norm_stderr,none": 0.014658474370509014,
						"acc_stderr,none": 0.014658474370509014,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702298,
						"acc_stderr,none": 0.013681600278702298,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.314,
						"acc_norm,none": 0.314,
						"acc_norm_stderr,none": 0.014683991951087962,
						"acc_stderr,none": 0.014683991951087962,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895054,
						"acc_stderr,none": 0.013825416526895054,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.29833333333333334,
						"acc_norm,none": 0.29833333333333334,
						"acc_norm_stderr,none": 0.018694028559022177,
						"acc_stderr,none": 0.018694028559022177,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.243,
						"acc_norm,none": 0.243,
						"acc_norm_stderr,none": 0.013569640199177458,
						"acc_stderr,none": 0.013569640199177458,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.322,
						"acc_norm,none": 0.322,
						"acc_norm_stderr,none": 0.014782913600996673,
						"acc_stderr,none": 0.014782913600996673,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.305,
						"acc_norm,none": 0.305,
						"acc_norm_stderr,none": 0.014566646394664385,
						"acc_stderr,none": 0.014566646394664385,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008217,
						"acc_stderr,none": 0.014414290540008217,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.24333333333333335,
						"acc_norm,none": 0.24333333333333335,
						"acc_norm_stderr,none": 0.02481518457232592,
						"acc_stderr,none": 0.02481518457232592,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.013699915608779773,
						"acc_stderr,none": 0.013699915608779773,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.313,
						"acc_norm,none": 0.313,
						"acc_norm_stderr,none": 0.014671272822977886,
						"acc_stderr,none": 0.014671272822977886,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.291,
						"acc_norm,none": 0.291,
						"acc_norm_stderr,none": 0.01437099598237793,
						"acc_stderr,none": 0.01437099598237793,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073033,
						"acc_stderr,none": 0.030275120389073033,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.282,
						"acc_norm,none": 0.282,
						"acc_norm_stderr,none": 0.014236526215291359,
						"acc_stderr,none": 0.014236526215291359,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485254,
						"acc_stderr,none": 0.014174516461485254,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073044,
						"acc_stderr,none": 0.030275120389073044,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.304,
						"acc_norm,none": 0.304,
						"acc_norm_stderr,none": 0.014553205687950441,
						"acc_stderr,none": 0.014553205687950441,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.49660162245121686,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.0004963847695390727,
						"acc_stderr,none": 0.03848474421490906,
						"alias": "kobest",
						"f1,none": 0.404458388962019,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5192307692307693,
						"acc_stderr,none": 0.01333889049562068,
						"alias": " - kobest_boolq",
						"f1,none": 0.4050813846405912,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.015790799515836763,
						"alias": " - kobest_copa",
						"f1,none": 0.5295766189570613,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.37,
						"acc_norm,none": 0.452,
						"acc_norm_stderr,none": 0.022279694107843417,
						"acc_stderr,none": 0.021613289165165785,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3677788398497996,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5188916876574308,
						"acc_stderr,none": 0.02510800428419158,
						"alias": " - kobest_sentineg",
						"f1,none": 0.3759556905249817,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6445759751601009,
						"acc_stderr,none": 0.019553217069408043,
						"alias": "lambada",
						"perplexity,none": 4.939995648569303,
						"perplexity_stderr,none": 0.384096274513576
					},
					"lambada_cloze": {
						"acc,none": 0.015913060353192315,
						"acc_stderr,none": 0.0034757626126060174,
						"alias": "lambada_cloze",
						"perplexity,none": 550.3879549032538,
						"perplexity_stderr,none": 19.364961419922356
					},
					"lambada_multilingual": {
						"acc,none": 0.43524160683097224,
						"acc_stderr,none": 0.08914399127428199,
						"alias": "lambada_multilingual",
						"perplexity,none": 53.74574184344065,
						"perplexity_stderr,none": 19.125461313064765
					},
					"lambada_openai": {
						"acc,none": 0.6811566078012808,
						"acc_stderr,none": 0.006492684061449838,
						"alias": " - lambada_openai",
						"perplexity,none": 4.2109339571254605,
						"perplexity_stderr,none": 0.09447809444003981
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.009897147292839123,
						"acc_stderr,none": 0.0013791364776453536,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 565.160542398072,
						"perplexity_stderr,none": 19.57387099481846
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.3366970696681545,
						"acc_stderr,none": 0.006583967813636983,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 75.43344304986505,
						"perplexity_stderr,none": 4.574456531099678
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6797981758199108,
						"acc_stderr,none": 0.006500009196897406,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.20998292373094,
						"perplexity_stderr,none": 0.09449669730584608
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.34814671065398795,
						"acc_stderr,none": 0.0066369439754181015,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 74.01256266229356,
						"perplexity_stderr,none": 4.152593649054864
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4218901610712206,
						"acc_stderr,none": 0.006880451721323675,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 47.31052884822474,
						"perplexity_stderr,none": 2.7170703315382356
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.38967591694158743,
						"acc_stderr,none": 0.00679429046327339,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 67.76219173308895,
						"perplexity_stderr,none": 4.14912740656985
					},
					"lambada_standard": {
						"acc,none": 0.6078012808072967,
						"acc_stderr,none": 0.006802146227117815,
						"alias": " - lambada_standard",
						"perplexity,none": 5.671459212127145,
						"perplexity_stderr,none": 0.136409003032659
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.021928973413545508,
						"acc_stderr,none": 0.0020403582121871905,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 535.6153674084355,
						"perplexity_stderr,none": 16.056275213621763
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23982188295165394,
						"exact_match_stderr,get-answer": 0.010772437759520104
					},
					"logiqa": {
						"acc,none": 0.21812596006144394,
						"acc_norm,none": 0.27342549923195086,
						"acc_norm_stderr,none": 0.01748247454768128,
						"acc_stderr,none": 0.016198149258419312,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2385496183206107,
						"acc_norm,none": 0.2767175572519084,
						"acc_norm_stderr,none": 0.011287148180222289,
						"acc_stderr,none": 0.01075281254696114,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2716917922948074,
						"acc_norm,none": 0.26901172529313233,
						"acc_norm_stderr,none": 0.008117857077753504,
						"acc_stderr,none": 0.008143225327312436,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.44492692226223257,
						"acc_stderr,none": 0.005114585295639306,
						"alias": "mc_taco",
						"f1,none": 0.5235021365578689,
						"f1_stderr,none": 0.005788780773167347
					},
					"medmcqa": {
						"acc,none": 0.2620129093951709,
						"acc_norm,none": 0.2620129093951709,
						"acc_norm_stderr,none": 0.006799762013471725,
						"acc_stderr,none": 0.006799762013471725,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2623723487824038,
						"acc_norm,none": 0.2623723487824038,
						"acc_norm_stderr,none": 0.012334855614561532,
						"acc_stderr,none": 0.012334855614561532,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2570858852015382,
						"acc_stderr,none": 0.03735937843684266,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2814814814814815,
						"acc_stderr,none": 0.03885004245800254,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.23026315789473684,
						"acc_stderr,none": 0.034260594244031654,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.25660377358490566,
						"acc_stderr,none": 0.02688064788905197,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03476590104304135,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847415,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.26011560693641617,
						"acc_stderr,none": 0.03345036916788992,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.21568627450980393,
						"acc_stderr,none": 0.04092563958237656,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.04725815626252603,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.24680851063829787,
						"acc_stderr,none": 0.02818544130123408,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.04049339297748141,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.30344827586206896,
						"acc_stderr,none": 0.038312260488503336,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.022860838309232065,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.24603174603174602,
						"acc_stderr,none": 0.03852273364924315,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.27419354838709675,
						"acc_stderr,none": 0.025378139970885207,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.0307127300709826,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.30303030303030304,
						"acc_stderr,none": 0.03588624800091707,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2474747474747475,
						"acc_stderr,none": 0.03074630074212449,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21243523316062177,
						"acc_stderr,none": 0.029519282616817247,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.26153846153846155,
						"acc_stderr,none": 0.022282141204204423,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.27037037037037037,
						"acc_stderr,none": 0.027080372815145668,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.24369747899159663,
						"acc_stderr,none": 0.027886828078380544,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.304635761589404,
						"acc_stderr,none": 0.03757949922943343,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.26788990825688075,
						"acc_stderr,none": 0.01898746225797865,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.25462962962962965,
						"acc_stderr,none": 0.02971127586000534,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.28921568627450983,
						"acc_stderr,none": 0.031822318676475544,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.22784810126582278,
						"acc_stderr,none": 0.027303484599069422,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.28699551569506726,
						"acc_stderr,none": 0.030360379710291957,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.26717557251908397,
						"acc_stderr,none": 0.03880848301082395,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24952178533475028,
						"acc_stderr,none": 0.029834088500318552,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2644628099173554,
						"acc_stderr,none": 0.04026187527591206,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.26851851851851855,
						"acc_stderr,none": 0.04284467968052191,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.22699386503067484,
						"acc_stderr,none": 0.032910995786157686,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.2767857142857143,
						"acc_stderr,none": 0.04246624336697624,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.1553398058252427,
						"acc_stderr,none": 0.03586594738573974,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.23504273504273504,
						"acc_stderr,none": 0.027778835904935434,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.280970625798212,
						"acc_stderr,none": 0.016073127851221246,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2023121387283237,
						"acc_stderr,none": 0.021628077380196124,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2569832402234637,
						"acc_stderr,none": 0.014614465821966332,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2581699346405229,
						"acc_stderr,none": 0.025058503316958147,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2745413582233666,
						"acc_stderr,none": 0.041946549375474165,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2508038585209003,
						"acc_stderr,none": 0.024619771956697168,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2654320987654321,
						"acc_stderr,none": 0.024569223600460845,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2695035460992908,
						"acc_stderr,none": 0.026469036818590624,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.23989569752281617,
						"acc_stderr,none": 0.010906282617981655,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.029029422815681404,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.2565359477124183,
						"acc_stderr,none": 0.017667841612378995,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.20909090909090908,
						"acc_stderr,none": 0.03895091015724137,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.17551020408163265,
						"acc_stderr,none": 0.024352800722970015,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24244393890152746,
						"acc_stderr,none": 0.032338006087189786,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.22388059701492538,
						"acc_stderr,none": 0.029475250236017193,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.26546146527117026,
						"acc_stderr,none": 0.04232111490669083,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.25903614457831325,
						"acc_stderr,none": 0.034106466140718564,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.034886477134579215,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3550687722873153,
						"acc_stderr,none": 0.004830475853055746,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36574450772986167,
						"acc_stderr,none": 0.004857604775791886,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.45098039215686275,
						"acc_stderr,none": 0.024664683843663437,
						"alias": "mrpc",
						"f1,none": 0.45365853658536587,
						"f1_stderr,none": 0.03057412330793303
					},
					"multimedqa": {
						"acc,none": 0.29737402413058905,
						"acc_norm,none": 0.2647179445941056,
						"acc_norm_stderr,none": 8.498507738732542e-05,
						"acc_stderr,none": 0.11338523752503174,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5579620462046204,
						"acc_stderr,none": 0.007133383932583475,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6860421385708566,
						"mrr_stderr,none": 0.010326315055753916,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.42776523702031605,
						"r@2_stderr,none": 0.01663099478654634
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6435289709885557,
						"mrr_stderr,none": 0.010453153173992907,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4616252821670429,
						"r@2_stderr,none": 0.01675774147880103
					},
					"openbookqa": {
						"acc,none": 0.294,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.021930844120728505,
						"acc_stderr,none": 0.0203950954849366,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.3665,
						"acc_stderr,none": 0.010777149366796484,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.3455,
						"acc_stderr,none": 0.01063585613469154,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.385,
						"acc_stderr,none": 0.010883323176386983,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.011180899170152976,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4735,
						"acc_stderr,none": 0.011167418260963935,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5485,
						"acc_stderr,none": 0.01113040061763076,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.4195,
						"acc_stderr,none": 0.011037245371590673,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4355,
						"acc_stderr,none": 0.05530290478323628,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7519042437431991,
						"acc_norm,none": 0.7573449401523396,
						"acc_norm_stderr,none": 0.010002002569708698,
						"acc_stderr,none": 0.01007711831557472,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.23414816396242527,
						"acc_norm,none": 0.2785546541417592,
						"acc_norm_stderr,none": 0.0032751415813332763,
						"acc_stderr,none": 0.0030937905499022257,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.019780559675655486,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7111206938271635,
						"acc_norm,none": 0.5551597643109178,
						"acc_norm_stderr,none": 0.009956089462458566,
						"acc_stderr,none": 0.1524940397519855,
						"alias": "pythia",
						"bits_per_byte,none": 0.6655260472001193,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5861465214371127,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.2109339571254605,
						"perplexity_stderr,none": 0.09447809444003981,
						"word_perplexity,none": 11.784900611347632,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3351063829787234,
						"acc_norm,none": 0.41843971631205673,
						"acc_norm_stderr,none": 0.06222883032359386,
						"acc_stderr,none": 0.047898574390541536,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4083333333333333,
						"acc_norm,none": 0.5416666666666666,
						"acc_norm_stderr,none": 0.04567549854280213,
						"acc_stderr,none": 0.045058059858031296,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.038851434494290536,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3345070422535211,
						"acc_norm,none": 0.3767605633802817,
						"acc_norm_stderr,none": 0.02880493928871122,
						"acc_stderr,none": 0.028046659818657005,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5346879004210141,
						"acc_stderr,none": 0.006749109948023715,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5028691565669058,
						"acc_stderr,none": 0.0024866596900210212,
						"alias": "qqp",
						"f1,none": 0.2955875652752953,
						"f1_stderr,none": 0.0035254536626231673
					},
					"race": {
						"acc,none": 0.3406698564593301,
						"acc_stderr,none": 0.014667904380876565,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5306859205776173,
						"acc_stderr,none": 0.03003973059219781,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.902,
						"acc_norm,none": 0.869,
						"acc_norm_stderr,none": 0.010674874844837956,
						"acc_stderr,none": 0.009406619184621233,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5270758122743683,
						"acc_stderr,none": 0.030052303463143706,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.49311926605504586,
						"acc_stderr,none": 0.016940249406163867,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.548335499350195,
						"acc_norm,none": 0.7419774067779666,
						"acc_norm_stderr,none": 0.003093534263894987,
						"acc_stderr,none": 0.0035185350577884027,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5490000332767628,
						"acc_stderr,none": 0.022098657972730456,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5530849358974359,
						"acc_stderr,none": 0.004975971408031274,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5994729907773386,
						"acc_stderr,none": 0.004933216309968489,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.4961764705882353,
						"acc_stderr,none": 0.004950835653333895,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.28899459953341194,
						"acc_stderr,none": 0.001417793278876567,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811392,
						"bleu_diff,none": -7.712989173818157,
						"bleu_diff_stderr,none": 0.7761644940195728,
						"bleu_max,none": 24.119256687901157,
						"bleu_max_stderr,none": 0.7545894176850129,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522502,
						"rouge1_diff,none": -10.499797833766538,
						"rouge1_diff_stderr,none": 0.7998946584915853,
						"rouge1_max,none": 49.16599266502819,
						"rouge1_max_stderr,none": 0.86748579893984,
						"rouge2_acc,none": 0.22399020807833536,
						"rouge2_acc_stderr,none": 0.014594964329474207,
						"rouge2_diff,none": -12.087376053924952,
						"rouge2_diff_stderr,none": 0.9838091389086047,
						"rouge2_max,none": 32.82913940889599,
						"rouge2_max_stderr,none": 0.9770875656132315,
						"rougeL_acc,none": 0.2521419828641371,
						"rougeL_acc_stderr,none": 0.015201522246299969,
						"rougeL_diff,none": -10.93455550712026,
						"rougeL_diff_stderr,none": 0.8131722241769852,
						"rougeL_max,none": 46.06771389346665,
						"rougeL_max_stderr,none": 0.884616523271091
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2962056303549572,
						"bleu_acc_stderr,none": 0.015983595101811392,
						"bleu_diff,none": -7.712989173818157,
						"bleu_diff_stderr,none": 0.7761644940195728,
						"bleu_max,none": 24.119256687901157,
						"bleu_max_stderr,none": 0.7545894176850129,
						"rouge1_acc,none": 0.26438188494492043,
						"rouge1_acc_stderr,none": 0.015438211119522502,
						"rouge1_diff,none": -10.499797833766538,
						"rouge1_diff_stderr,none": 0.7998946584915853,
						"rouge1_max,none": 49.16599266502819,
						"rouge1_max_stderr,none": 0.86748579893984,
						"rouge2_acc,none": 0.22399020807833536,
						"rouge2_acc_stderr,none": 0.014594964329474207,
						"rouge2_diff,none": -12.087376053924952,
						"rouge2_diff_stderr,none": 0.9838091389086047,
						"rouge2_max,none": 32.82913940889599,
						"rouge2_max_stderr,none": 0.9770875656132315,
						"rougeL_acc,none": 0.2521419828641371,
						"rougeL_acc_stderr,none": 0.015201522246299969,
						"rougeL_diff,none": -10.93455550712026,
						"rougeL_diff_stderr,none": 0.8131722241769852,
						"rougeL_max,none": 46.06771389346665,
						"rougeL_max_stderr,none": 0.884616523271091
					},
					"truthfulqa_mc1": {
						"acc,none": 0.21909424724602203,
						"acc_stderr,none": 0.014480038578757447,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.35889495182080183,
						"acc_stderr,none": 0.01347698031717213,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.0359251968503937,
						"exact_match_stderr,none": 0.004129523888282568
					},
					"wic": {
						"acc,none": 0.5047021943573667,
						"acc_stderr,none": 0.01980984521925977,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6655260472001193,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5861465214371127,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.784900611347632,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6298342541436464,
						"acc_stderr,none": 0.013570454689603911,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5492957746478874,
						"acc_stderr,none": 0.05947027187737999,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8095238095238095,
						"acc_stderr,none": 0.023809523809523777,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5529090909090909,
						"acc_stderr,none": 0.03557018585454539,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207867,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.58,
						"acc_stderr,none": 0.022094713229761784,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.02189352994166582,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.022303966774269948,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.574,
						"acc_stderr,none": 0.022136577335085637,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.584,
						"acc_stderr,none": 0.022064943313928862,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.586,
						"acc_stderr,none": 0.02204949796982787,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.4210441767068273,
						"acc_stderr,none": 0.04877213923480642,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361543,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.42730923694779116,
						"acc_stderr,none": 0.009915595034908124,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4734939759036145,
						"acc_stderr,none": 0.010007980557732814,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.36224899598393573,
						"acc_stderr,none": 0.009634223618009015,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5477911646586345,
						"acc_stderr,none": 0.009976187086803713,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.45943775100401607,
						"acc_stderr,none": 0.009989039874786904,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.45180722891566266,
						"acc_stderr,none": 0.009975410845717852,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.41164658634538154,
						"acc_stderr,none": 0.009864360821750334,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.47269076305220886,
						"acc_stderr,none": 0.010007112889731976,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3642570281124498,
						"acc_stderr,none": 0.009645667910246843,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.38433734939759034,
						"acc_stderr,none": 0.009750238765722516,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.009924844537285527,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.36947791164658633,
						"acc_stderr,none": 0.009674576085776442,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.4538152610441767,
						"acc_stderr,none": 0.009979226512455494,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.36586345381526103,
						"acc_stderr,none": 0.00965469276557258,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5632633415558631,
						"acc_stderr,none": 0.057373890886261665,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.5175380542686963,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7260092653871608,
						"acc_stderr,none": 0.011477585756917189,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6346790205162144,
						"acc_stderr,none": 0.012391557728373985,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5241561879549967,
						"acc_stderr,none": 0.012852100057309614,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5354070152217075,
						"acc_stderr,none": 0.01283482285286004,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5698213103904699,
						"acc_stderr,none": 0.01274105281747108,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4990072799470549,
						"acc_stderr,none": 0.012867099955422944,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5737921906022502,
						"acc_stderr,none": 0.012726223450627901,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.49768365320979485,
						"acc_stderr,none": 0.012866987239478045,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5532759761747187,
						"acc_stderr,none": 0.012793874526730217,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5645268034414295,
						"acc_stderr,none": 0.012759525506489237,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7561249719037986,
						"acc_stderr,none": 0.046369938156017504,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8417204301075268,
						"acc_stderr,none": 0.00757143682626236,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6506024096385542,
						"acc_stderr,none": 0.0526515135644047,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6506777893639207,
						"acc_stderr,none": 0.015403284489386057,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6844106463878327,
						"acc_stderr,none": 0.02871236092422276,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6507936507936508,
						"acc_stderr,none": 0.026902825537698717,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6825396825396826,
						"acc_stderr,none": 0.020755092996296517,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "state-spaces/mamba-2.8b-hf"
	},
	"tiiuae/falcon-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6338782412626832,
						"acc_norm,none": 0.6186583990980834,
						"acc_norm_stderr,none": 0.044049926782733256,
						"acc_stderr,none": 0.05474959337674877,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3540625,
						"acc_stderr,none": 0.01691743270687656,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.23665,
						"acc_stderr,none": 0.21591247448244397,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8172537313432836,
						"acc_stderr,none": 0.1642018708246464,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24962852897473997,
						"acc_norm,none": 0.24962852897473997,
						"acc_norm_stderr,none": 0.11459057055150745,
						"acc_stderr,none": 0.11459057055150745,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.24728026247625626,
						"acc_norm,none": 0.24728026247625626,
						"acc_norm_stderr,none": 0.03519032433849343,
						"acc_stderr,none": 0.03519032433849343,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3802735539654143,
						"likelihood_diff_stderr,none": 0.504755793817865,
						"pct_stereotype,none": 0.6475849731663685,
						"pct_stereotype_stderr,none": 0.06023625046908003
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1279527559055118,
						"exact_match_stderr,none": 0.007412076063525489
					},
					"glue": {
						"acc,none": 0.4577606800519776,
						"acc_stderr,none": 0.03671679326985771,
						"alias": "glue",
						"f1,none": 0.48751151502587453,
						"f1_stderr,none": 0.0006971121888985943,
						"mcc,none": 0.05873054109498616,
						"mcc_stderr,none": 0.0011673313475584989
					},
					"kmmlu": {
						"acc,none": 0.28264510539994214,
						"acc_norm,none": 0.28264510539994214,
						"acc_norm_stderr,none": 0.02601208522542946,
						"acc_stderr,none": 0.02601208522542946,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.46941460206095154,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.0004974669338677385,
						"acc_stderr,none": 0.04459091384281508,
						"alias": "kobest",
						"f1,none": 0.37186129039626437,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.715990685037842,
						"acc_stderr,none": 0.015892540569058613,
						"alias": "lambada",
						"perplexity,none": 3.6628247406457177,
						"perplexity_stderr,none": 0.1625132371820016
					},
					"lambada_cloze": {
						"acc,none": 0.019115078594993208,
						"acc_stderr,none": 0.0020090951884566745,
						"alias": "lambada_cloze",
						"perplexity,none": 197.41253446417088,
						"perplexity_stderr,none": 13.780615990717683
					},
					"lambada_multilingual": {
						"acc,none": 0.5126722297690666,
						"acc_stderr,none": 0.07140287801348252,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.66553270480355,
						"perplexity_stderr,none": 10.316726322742879
					},
					"mmlu": {
						"acc,none": 0.25124626121635096,
						"acc_stderr,none": 0.040123172634924875,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.26057385759829976,
						"acc_stderr,none": 0.03396620709314821,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25555197940135177,
						"acc_stderr,none": 0.04086467458875185,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2430939226519337,
						"acc_stderr,none": 0.03690183735776518,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24104027909927053,
						"acc_stderr,none": 0.04824039520692752,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2958126330731015,
						"acc_norm,none": 0.2754739830660537,
						"acc_norm_stderr,none": 8.751546615277735e-05,
						"acc_stderr,none": 0.09499139878568526,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.48192857142857143,
						"acc_stderr,none": 0.04936705250378216,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7167411129823723,
						"acc_norm,none": 0.6232890725893111,
						"acc_norm_stderr,none": 0.004376227713404006,
						"acc_stderr,none": 0.1530606905089029,
						"alias": "pythia",
						"bits_per_byte,none": 0.6449064054167248,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5636378422407153,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.371030447212134,
						"perplexity_stderr,none": 0.06463160820980969,
						"word_perplexity,none": 10.91776141132558,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4574468085106383,
						"acc_norm_stderr,none": 0.050384093205958665,
						"acc_stderr,none": 0.044605867998649226,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5511297460982996,
						"acc_stderr,none": 0.07908027444684382,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2825705305262654,
						"acc_stderr,none": 0.0010887149901545557,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3108935128518972,
						"bleu_acc_stderr,none": 0.016203316673559693,
						"bleu_diff,none": -9.354374155744162,
						"bleu_diff_stderr,none": 0.8230553636710657,
						"bleu_max,none": 23.138368130095003,
						"bleu_max_stderr,none": 0.7650589816904417,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766372,
						"rouge1_diff,none": -12.920617255342343,
						"rouge1_diff_stderr,none": 0.896332620078308,
						"rouge1_max,none": 46.5363077764266,
						"rouge1_max_stderr,none": 0.9050743200460277,
						"rouge2_acc,none": 0.19461444308445533,
						"rouge2_acc_stderr,none": 0.01385939820702943,
						"rouge2_diff,none": -15.101502049728007,
						"rouge2_diff_stderr,none": 1.0680310771067492,
						"rouge2_max,none": 29.447400173329665,
						"rouge2_max_stderr,none": 1.0100666515515113,
						"rougeL_acc,none": 0.25091799265605874,
						"rougeL_acc_stderr,none": 0.01517698502770769,
						"rougeL_diff,none": -13.30538274852999,
						"rougeL_diff_stderr,none": 0.9101675355915293,
						"rougeL_max,none": 43.8008067041745,
						"rougeL_max_stderr,none": 0.9007127526702894
					},
					"xcopa": {
						"acc,none": 0.5596363636363636,
						"acc_stderr,none": 0.07164986756889576,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3897724230254351,
						"acc_stderr,none": 0.05644927888896788,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5603754286745685,
						"acc_stderr,none": 0.08604949243256223,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7772533153517645,
						"acc_stderr,none": 0.07015448404595596,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6338782412626832,
						"acc_norm,none": 0.6186583990980834,
						"acc_norm_stderr,none": 0.044049926782733256,
						"acc_stderr,none": 0.05474959337674877,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3540625,
						"acc_stderr,none": 0.01691743270687656,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.332,
						"acc_stderr,none": 0.014899597242811478,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.359,
						"acc_stderr,none": 0.01517726422479859,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.36833333333333335,
						"acc_stderr,none": 0.013930121355353773,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40273037542662116,
						"acc_norm,none": 0.4351535836177474,
						"acc_norm_stderr,none": 0.01448798619718605,
						"acc_stderr,none": 0.01433223630679014,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.747895622895623,
						"acc_norm,none": 0.7091750841750841,
						"acc_norm_stderr,none": 0.009318815921176652,
						"acc_stderr,none": 0.008910024163218191,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.23665,
						"acc_stderr,none": 0.21591247448244397,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.077,
						"acc_stderr,none": 0.005962656843917707,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.7945,
						"acc_stderr,none": 0.00903746163789507,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.194,
						"acc_stderr,none": 0.008844269927771192,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.01112707984841374,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.1465,
						"acc_stderr,none": 0.007908865283657343,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.011012544577391419,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0025,
						"acc_stderr,none": 0.0011169148353275301,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.108,
						"acc_stderr,none": 0.006942052725816977,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.081,
						"acc_stderr,none": 0.006102304405675837,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.006941431670281995,
						"acc_stderr,none": 0.0017296997417071969,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8172537313432836,
						"acc_stderr,none": 0.1642018708246464,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.00985982840703719,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329814,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.002231586874844884,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366662,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491129,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.725,
						"acc_stderr,none": 0.014127086556490524,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.625,
						"acc_stderr,none": 0.015316971293620996,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.743,
						"acc_stderr,none": 0.013825416526895031,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724432,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695478,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.007454835650406725,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.0072129762946392395,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.935,
						"acc_stderr,none": 0.007799733061832016,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.914,
						"acc_stderr,none": 0.008870325962594766,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523689,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452367,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.866,
						"acc_stderr,none": 0.010777762298369678,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.697,
						"acc_stderr,none": 0.014539683710535259,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.693,
						"acc_stderr,none": 0.014593284892852621,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.829,
						"acc_stderr,none": 0.0119122164562646,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706822,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.824,
						"acc_stderr,none": 0.012048616898597488,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891024,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.455,
						"acc_stderr,none": 0.01575510149834709,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.895,
						"acc_stderr,none": 0.009698921026024956,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.786,
						"acc_stderr,none": 0.012975838021968769,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.015325105508898129,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.746,
						"acc_stderr,none": 0.013772206565168537,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910641,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423524,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866442,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491113,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.719,
						"acc_stderr,none": 0.014221154708434944,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.874,
						"acc_stderr,none": 0.010499249222408047,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.372,
						"acc_stderr,none": 0.015292149942040575,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.01570498795436179,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.592,
						"acc_stderr,none": 0.015549205052920675,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524293,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.544,
						"acc_stderr,none": 0.015757928553979172,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.907,
						"acc_stderr,none": 0.00918887563499668,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523727,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598268,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523736,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557824,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799456,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.395,
						"acc_stderr,none": 0.015466551464829342,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.957,
						"acc_stderr,none": 0.006418114379799741,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.00905439020486644,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.761,
						"acc_stderr,none": 0.01349300044693759,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.015818160898606715,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557432,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333443,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.0109781838443578,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.01070937396352803,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.838,
						"acc_stderr,none": 0.01165726777130443,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523693,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.008484573530118587,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178322,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.97,
						"acc_stderr,none": 0.005397140829099214,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.014498627873361428,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.283,
						"acc_stderr,none": 0.01425181090648176,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7379204892966361,
						"acc_stderr,none": 0.00769154984743817,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.06527912098338669,
						"alias": "cb",
						"f1,none": 0.36227486676780646,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24962852897473997,
						"acc_norm,none": 0.24962852897473997,
						"acc_norm_stderr,none": 0.11459057055150745,
						"acc_stderr,none": 0.11459057055150745,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.053348255582850765,
						"acc_stderr,none": 0.053348255582850765,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.06818181818181816,
						"acc_stderr,none": 0.06818181818181816,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.10638297872340426,
						"acc_norm,none": 0.10638297872340426,
						"acc_norm_stderr,none": 0.04546036031565445,
						"acc_stderr,none": 0.04546036031565445,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0606060606060606,
						"acc_stderr,none": 0.0606060606060606,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595453,
						"acc_stderr,none": 0.08534681648595453,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3548387096774194,
						"acc_norm,none": 0.3548387096774194,
						"acc_norm_stderr,none": 0.08735525166275225,
						"acc_stderr,none": 0.08735525166275225,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.05763033956734372,
						"acc_stderr,none": 0.05763033956734372,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.1123666437438737,
						"acc_stderr,none": 0.1123666437438737,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522557,
						"acc_stderr,none": 0.11369720523522557,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.13793103448275862,
						"acc_norm,none": 0.13793103448275862,
						"acc_norm_stderr,none": 0.06516628844986677,
						"acc_stderr,none": 0.06516628844986677,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.06791703342160262,
						"acc_stderr,none": 0.06791703342160262,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936745,
						"acc_stderr,none": 0.07099970268936745,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.24728026247625626,
						"acc_norm,none": 0.24728026247625626,
						"acc_norm_stderr,none": 0.03519032433849343,
						"acc_stderr,none": 0.03519032433849343,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.22972972972972974,
						"acc_norm,none": 0.22972972972972974,
						"acc_norm_stderr,none": 0.03469536825407609,
						"acc_stderr,none": 0.03469536825407609,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.21951219512195122,
						"acc_norm,none": 0.21951219512195122,
						"acc_norm_stderr,none": 0.032420416133953835,
						"acc_stderr,none": 0.032420416133953835,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2606060606060606,
						"acc_norm,none": 0.2606060606060606,
						"acc_norm_stderr,none": 0.03427743175816524,
						"acc_stderr,none": 0.03427743175816524,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2583732057416268,
						"acc_norm,none": 0.2583732057416268,
						"acc_norm_stderr,none": 0.030351822614803414,
						"acc_stderr,none": 0.030351822614803414,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.25190839694656486,
						"acc_norm,none": 0.25190839694656486,
						"acc_norm_stderr,none": 0.03807387116306085,
						"acc_stderr,none": 0.03807387116306085,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.042188119282053044,
						"acc_stderr,none": 0.042188119282053044,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25386996904024767,
						"acc_norm,none": 0.25386996904024767,
						"acc_norm_stderr,none": 0.024254090252458047,
						"acc_stderr,none": 0.024254090252458047,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.24019607843137256,
						"acc_norm,none": 0.24019607843137256,
						"acc_norm_stderr,none": 0.02998373305591361,
						"acc_stderr,none": 0.02998373305591361,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25316455696202533,
						"acc_norm,none": 0.25316455696202533,
						"acc_norm_stderr,none": 0.0283046579430353,
						"acc_stderr,none": 0.0283046579430353,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.03889722288318549,
						"acc_stderr,none": 0.03889722288318549,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763312,
						"acc_stderr,none": 0.04252016223763312,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.04330043749650743,
						"acc_stderr,none": 0.04330043749650743,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.0423247353205504,
						"acc_stderr,none": 0.0423247353205504,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763309,
						"acc_stderr,none": 0.04252016223763309,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.25274725274725274,
						"acc_norm,none": 0.25274725274725274,
						"acc_norm_stderr,none": 0.026350722655564405,
						"acc_stderr,none": 0.026350722655564405,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.03019028245350194,
						"acc_stderr,none": 0.03019028245350194,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946957,
						"acc_stderr,none": 0.03274485211946957,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.034531515032766795,
						"acc_stderr,none": 0.034531515032766795,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.23741007194244604,
						"acc_norm,none": 0.23741007194244604,
						"acc_norm_stderr,none": 0.036220593237998276,
						"acc_stderr,none": 0.036220593237998276,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.2389937106918239,
						"acc_norm,none": 0.2389937106918239,
						"acc_norm_stderr,none": 0.03392804345289632,
						"acc_stderr,none": 0.03392804345289632,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.034089978868575295,
						"acc_stderr,none": 0.034089978868575295,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2616279069767442,
						"acc_norm,none": 0.2616279069767442,
						"acc_norm_stderr,none": 0.033611014038904936,
						"acc_stderr,none": 0.033611014038904936,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.24206349206349206,
						"acc_norm,none": 0.24206349206349206,
						"acc_norm_stderr,none": 0.02703610967923697,
						"acc_stderr,none": 0.02703610967923697,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25252525252525254,
						"acc_norm,none": 0.25252525252525254,
						"acc_norm_stderr,none": 0.030954055470365904,
						"acc_stderr,none": 0.030954055470365904,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.23949579831932774,
						"acc_norm,none": 0.23949579831932774,
						"acc_norm_stderr,none": 0.027722065493361252,
						"acc_stderr,none": 0.027722065493361252,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.24347826086956523,
						"acc_norm,none": 0.24347826086956523,
						"acc_norm_stderr,none": 0.02836109930007507,
						"acc_stderr,none": 0.02836109930007507,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.035914440841969694,
						"acc_stderr,none": 0.035914440841969694,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.0357250214181557,
						"acc_stderr,none": 0.0357250214181557,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.26174496644295303,
						"acc_norm,none": 0.26174496644295303,
						"acc_norm_stderr,none": 0.036133623910754545,
						"acc_stderr,none": 0.036133623910754545,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.0327931779226895,
						"acc_stderr,none": 0.0327931779226895,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.20454545454545456,
						"acc_norm,none": 0.20454545454545456,
						"acc_norm_stderr,none": 0.03524251981380331,
						"acc_stderr,none": 0.03524251981380331,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.041220665028782834,
						"acc_stderr,none": 0.041220665028782834,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.23015873015873015,
						"acc_norm,none": 0.23015873015873015,
						"acc_norm_stderr,none": 0.037649508797906066,
						"acc_stderr,none": 0.037649508797906066,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707746,
						"acc_stderr,none": 0.03285260554707746,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25547445255474455,
						"acc_norm,none": 0.25547445255474455,
						"acc_norm_stderr,none": 0.021538805402399563,
						"acc_stderr,none": 0.021538805402399563,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24299065420560748,
						"acc_norm,none": 0.24299065420560748,
						"acc_norm_stderr,none": 0.02938702375433312,
						"acc_stderr,none": 0.02938702375433312,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.28688524590163933,
						"acc_norm,none": 0.28688524590163933,
						"acc_norm_stderr,none": 0.041118866352671826,
						"acc_stderr,none": 0.041118866352671826,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2328042328042328,
						"acc_norm,none": 0.2328042328042328,
						"acc_norm_stderr,none": 0.030822624150702204,
						"acc_stderr,none": 0.030822624150702204,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.0399037253226882,
						"acc_stderr,none": 0.0399037253226882,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2482758620689655,
						"acc_norm,none": 0.2482758620689655,
						"acc_norm_stderr,none": 0.0360010569272777,
						"acc_stderr,none": 0.0360010569272777,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.04176466758604901,
						"acc_stderr,none": 0.04176466758604901,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.03288889734209821,
						"acc_stderr,none": 0.03288889734209821,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945198,
						"acc_stderr,none": 0.029927771242945198,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.022593550801056256,
						"acc_stderr,none": 0.022593550801056256,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171163,
						"acc_stderr,none": 0.028324514684171163,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.032534138484822554,
						"acc_stderr,none": 0.032534138484822554,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066652,
						"acc_stderr,none": 0.03785714465066652,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.24778761061946902,
						"acc_norm,none": 0.24778761061946902,
						"acc_norm_stderr,none": 0.028781854672921457,
						"acc_stderr,none": 0.028781854672921457,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.23243243243243245,
						"acc_norm,none": 0.23243243243243245,
						"acc_norm_stderr,none": 0.031138505170794674,
						"acc_stderr,none": 0.031138505170794674,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323178,
						"acc_stderr,none": 0.03307162750323178,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.05873054109498616,
						"mcc_stderr,none": 0.03416623109970573
					},
					"copa": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.03265986323710906,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.3802735539654143,
						"likelihood_diff_stderr,none": 0.504755793817865,
						"pct_stereotype,none": 0.6475849731663685,
						"pct_stereotype_stderr,none": 0.06023625046908003
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.655113297555158,
						"likelihood_diff_stderr,none": 0.08266192276800512,
						"pct_stereotype,none": 0.667262969588551,
						"pct_stereotype_stderr,none": 0.011509648826633226
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.31456043956044,
						"likelihood_diff_stderr,none": 0.4034031941461593,
						"pct_stereotype,none": 0.7032967032967034,
						"pct_stereotype_stderr,none": 0.048151433626827785
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.590909090909091,
						"likelihood_diff_stderr,none": 1.605485174216224,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.351923076923077,
						"likelihood_diff_stderr,none": 0.611285173719654,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.66328125,
						"likelihood_diff_stderr,none": 0.15855277423232375,
						"pct_stereotype,none": 0.665625,
						"pct_stereotype_stderr,none": 0.026414133635631592
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.466435185185185,
						"likelihood_diff_stderr,none": 0.21338862206944156,
						"pct_stereotype,none": 0.6203703703703703,
						"pct_stereotype_stderr,none": 0.03309682581119035
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8836805555555554,
						"likelihood_diff_stderr,none": 0.3156465839713233,
						"pct_stereotype,none": 0.7777777777777778,
						"pct_stereotype_stderr,none": 0.04933922619854289
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3636811023622046,
						"likelihood_diff_stderr,none": 0.13332385568386187,
						"pct_stereotype,none": 0.5866141732283464,
						"pct_stereotype_stderr,none": 0.02187006568731772
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.7364864864864864,
						"likelihood_diff_stderr,none": 0.33053836160947103,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.04188770861432396
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.891129032258065,
						"likelihood_diff_stderr,none": 0.4632078509082206,
						"pct_stereotype,none": 0.8817204301075269,
						"pct_stereotype_stderr,none": 0.03366870454347983
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.222368421052631,
						"likelihood_diff_stderr,none": 0.2338998335055211,
						"pct_stereotype,none": 0.6894736842105263,
						"pct_stereotype_stderr,none": 0.03365713545671698
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.1056201550387597,
						"likelihood_diff_stderr,none": 0.07108247498564818,
						"pct_stereotype,none": 0.6285032796660703,
						"pct_stereotype_stderr,none": 0.011803050329696958
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.0805555555555557,
						"likelihood_diff_stderr,none": 0.3012685875186723,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.051929078688949845
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 1.9807692307692308,
						"likelihood_diff_stderr,none": 0.3770898375216842,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.14044168141158106
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.590909090909091,
						"likelihood_diff_stderr,none": 0.40429066647202916,
						"pct_stereotype,none": 0.7727272727272727,
						"pct_stereotype_stderr,none": 0.05197926135426054
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 2.6561526479750777,
						"likelihood_diff_stderr,none": 0.14269795589029027,
						"pct_stereotype,none": 0.6230529595015576,
						"pct_stereotype_stderr,none": 0.027091163755336607
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.2371541501976284,
						"likelihood_diff_stderr,none": 0.18184149973942149,
						"pct_stereotype,none": 0.48616600790513836,
						"pct_stereotype_stderr,none": 0.03148498134426772
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.6006944444444446,
						"likelihood_diff_stderr,none": 0.4016182093570985,
						"pct_stereotype,none": 0.7222222222222222,
						"pct_stereotype_stderr,none": 0.053156331218399945
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 2.8230978260869564,
						"likelihood_diff_stderr,none": 0.1264771621001447,
						"pct_stereotype,none": 0.5826086956521739,
						"pct_stereotype_stderr,none": 0.023017271312104015
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.0402173913043478,
						"likelihood_diff_stderr,none": 0.2422794702212937,
						"pct_stereotype,none": 0.7217391304347827,
						"pct_stereotype_stderr,none": 0.04197239673902095
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.418956043956044,
						"likelihood_diff_stderr,none": 0.29549020255770186,
						"pct_stereotype,none": 0.8021978021978022,
						"pct_stereotype_stderr,none": 0.041988952031962214
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.635841836734694,
						"likelihood_diff_stderr,none": 0.25509070405866724,
						"pct_stereotype,none": 0.7244897959183674,
						"pct_stereotype_stderr,none": 0.031993936246679046
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.1279527559055118,
						"exact_match_stderr,none": 0.007412076063525489
					},
					"glue": {
						"acc,none": 0.4577606800519776,
						"acc_stderr,none": 0.03671679326985771,
						"alias": "glue",
						"f1,none": 0.48751151502587453,
						"f1_stderr,none": 0.0006971121888985943,
						"mcc,none": 0.05873054109498616,
						"mcc_stderr,none": 0.0011673313475584989
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.05079605761940864,
						"exact_match_stderr,get-answer": 0.006048352096878092
					},
					"hellaswag": {
						"acc,none": 0.5771758613821948,
						"acc_norm,none": 0.7638916550487951,
						"acc_norm_stderr,none": 0.004238215815533088,
						"acc_stderr,none": 0.00492998369279507,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.28264510539994214,
						"acc_norm,none": 0.28264510539994214,
						"acc_norm_stderr,none": 0.02601208522542946,
						"acc_stderr,none": 0.02601208522542946,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.04229525846816508,
						"acc_stderr,none": 0.04229525846816508,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.014205696104091513,
						"acc_stderr,none": 0.014205696104091513,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.302,
						"acc_norm,none": 0.302,
						"acc_norm_stderr,none": 0.014526080235459548,
						"acc_stderr,none": 0.014526080235459548,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.254,
						"acc_norm,none": 0.254,
						"acc_norm_stderr,none": 0.013772206565168543,
						"acc_stderr,none": 0.013772206565168543,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.292,
						"acc_norm,none": 0.292,
						"acc_norm_stderr,none": 0.01438551156347735,
						"acc_stderr,none": 0.01438551156347735,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.2633333333333333,
						"acc_norm,none": 0.2633333333333333,
						"acc_norm_stderr,none": 0.017995959892029623,
						"acc_stderr,none": 0.017995959892029623,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.014356395999905692,
						"acc_stderr,none": 0.014356395999905692,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206488,
						"acc_stderr,none": 0.014619600977206488,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.309,
						"acc_norm,none": 0.309,
						"acc_norm_stderr,none": 0.014619600977206484,
						"acc_stderr,none": 0.014619600977206484,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.276,
						"acc_norm,none": 0.276,
						"acc_norm_stderr,none": 0.014142984975740671,
						"acc_stderr,none": 0.014142984975740671,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.035218036253024915,
						"acc_stderr,none": 0.035218036253024915,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.04793724854411018,
						"acc_stderr,none": 0.04793724854411018,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575025,
						"acc_stderr,none": 0.014442734941575025,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.298,
						"acc_norm,none": 0.298,
						"acc_norm_stderr,none": 0.014470846741134713,
						"acc_stderr,none": 0.014470846741134713,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.014399942998441276,
						"acc_stderr,none": 0.014399942998441276,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.01444273494157502,
						"acc_stderr,none": 0.01444273494157502,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.014282120955200478,
						"acc_stderr,none": 0.014282120955200478,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.257,
						"acc_norm,none": 0.257,
						"acc_norm_stderr,none": 0.013825416526895038,
						"acc_stderr,none": 0.013825416526895038,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.299,
						"acc_norm,none": 0.299,
						"acc_norm_stderr,none": 0.014484778521220473,
						"acc_stderr,none": 0.014484778521220473,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.289,
						"acc_norm,none": 0.289,
						"acc_norm_stderr,none": 0.014341711358296191,
						"acc_stderr,none": 0.014341711358296191,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.296,
						"acc_norm,none": 0.296,
						"acc_norm_stderr,none": 0.014442734941575022,
						"acc_stderr,none": 0.014442734941575022,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.311,
						"acc_norm,none": 0.311,
						"acc_norm_stderr,none": 0.014645596385722694,
						"acc_stderr,none": 0.014645596385722694,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.301,
						"acc_norm,none": 0.301,
						"acc_norm_stderr,none": 0.014512395033543141,
						"acc_stderr,none": 0.014512395033543141,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.248,
						"acc_norm,none": 0.248,
						"acc_norm_stderr,none": 0.01366318713487766,
						"acc_stderr,none": 0.01366318713487766,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.303,
						"acc_norm,none": 0.303,
						"acc_norm_stderr,none": 0.014539683710535253,
						"acc_stderr,none": 0.014539683710535253,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.246,
						"acc_norm,none": 0.246,
						"acc_norm_stderr,none": 0.013626065817750643,
						"acc_stderr,none": 0.013626065817750643,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.018571578902773448,
						"acc_stderr,none": 0.018571578902773448,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440469,
						"acc_stderr,none": 0.013946271849440469,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.306,
						"acc_norm,none": 0.306,
						"acc_norm_stderr,none": 0.01458000605543697,
						"acc_stderr,none": 0.01458000605543697,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.293,
						"acc_norm,none": 0.293,
						"acc_norm_stderr,none": 0.01439994299844128,
						"acc_stderr,none": 0.01439994299844128,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.278,
						"acc_norm,none": 0.278,
						"acc_norm_stderr,none": 0.014174516461485247,
						"acc_stderr,none": 0.014174516461485247,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653694,
						"acc_stderr,none": 0.03861229196653694,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23666666666666666,
						"acc_norm,none": 0.23666666666666666,
						"acc_norm_stderr,none": 0.024580463430538727,
						"acc_stderr,none": 0.024580463430538727,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.247,
						"acc_norm,none": 0.247,
						"acc_norm_stderr,none": 0.01364467578131413,
						"acc_stderr,none": 0.01364467578131413,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.308,
						"acc_norm,none": 0.308,
						"acc_norm_stderr,none": 0.014606483127342763,
						"acc_stderr,none": 0.014606483127342763,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440474,
						"acc_stderr,none": 0.013946271849440474,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.030695456590127176,
						"acc_stderr,none": 0.030695456590127176,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.264,
						"acc_norm,none": 0.264,
						"acc_norm_stderr,none": 0.013946271849440466,
						"acc_stderr,none": 0.013946271849440466,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.261,
						"acc_norm,none": 0.261,
						"acc_norm_stderr,none": 0.013895037677965136,
						"acc_stderr,none": 0.013895037677965136,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.285,
						"acc_norm,none": 0.285,
						"acc_norm_stderr,none": 0.03199992148231579,
						"acc_stderr,none": 0.03199992148231579,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.294,
						"acc_norm,none": 0.294,
						"acc_norm_stderr,none": 0.014414290540008215,
						"acc_stderr,none": 0.014414290540008215,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.46941460206095154,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.0004974669338677385,
						"acc_stderr,none": 0.04459091384281508,
						"alias": "kobest",
						"f1,none": 0.37186129039626437,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.476,
						"acc_stderr,none": 0.015801065586651758,
						"alias": " - kobest_copa",
						"f1,none": 0.47507403058996067,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.312,
						"acc_norm,none": 0.458,
						"acc_norm_stderr,none": 0.022303966774269962,
						"acc_stderr,none": 0.020740596536488087,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.30880660440843244,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4760705289672544,
						"acc_stderr,none": 0.025097153668550934,
						"alias": " - kobest_sentineg",
						"f1,none": 0.46340195466833023,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.715990685037842,
						"acc_stderr,none": 0.015892540569058613,
						"alias": "lambada",
						"perplexity,none": 3.6628247406457177,
						"perplexity_stderr,none": 0.1625132371820016
					},
					"lambada_cloze": {
						"acc,none": 0.019115078594993208,
						"acc_stderr,none": 0.0020090951884566745,
						"alias": "lambada_cloze",
						"perplexity,none": 197.41253446417088,
						"perplexity_stderr,none": 13.780615990717683
					},
					"lambada_multilingual": {
						"acc,none": 0.5126722297690666,
						"acc_stderr,none": 0.07140287801348252,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.66553270480355,
						"perplexity_stderr,none": 10.316726322742879
					},
					"lambada_openai": {
						"acc,none": 0.7459732194837958,
						"acc_stderr,none": 0.006064757540495055,
						"alias": " - lambada_openai",
						"perplexity,none": 3.371030447212134,
						"perplexity_stderr,none": 0.06463160820980969
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.01785367746943528,
						"acc_stderr,none": 0.0018448625404454664,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 222.47893875405592,
						"perplexity_stderr,none": 6.527243835865852
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.38230157189986413,
						"acc_stderr,none": 0.006770229062388661,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 51.992350380317696,
						"perplexity_stderr,none": 3.0317507291100503
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7442266640791771,
						"acc_stderr,none": 0.006078442596011089,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.370473032065976,
						"perplexity_stderr,none": 0.0646119763158426
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.42712982728507665,
						"acc_stderr,none": 0.006891601045518706,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 39.56866298143678,
						"perplexity_stderr,none": 2.045595111842822
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5216378808461091,
						"acc_stderr,none": 0.006959451688268363,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 19.11026107700648,
						"perplexity_stderr,none": 0.9213692883412197
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4880652047351058,
						"acc_stderr,none": 0.006963992915953921,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 29.285916053190796,
						"perplexity_stderr,none": 1.6460582925639256
					},
					"lambada_standard": {
						"acc,none": 0.6867843974383854,
						"acc_stderr,none": 0.006461658130130337,
						"alias": " - lambada_standard",
						"perplexity,none": 3.954881074397146,
						"perplexity_stderr,none": 0.07740025677012692
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.020376479720551136,
						"acc_stderr,none": 0.0019683678049975282,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 172.34613017428583,
						"perplexity_stderr,none": 4.797854923663695
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2582697201017812,
						"exact_match_stderr,get-answer": 0.011042608058378037
					},
					"logiqa": {
						"acc,none": 0.21505376344086022,
						"acc_norm,none": 0.27035330261136714,
						"acc_norm_stderr,none": 0.01742069478339314,
						"acc_stderr,none": 0.016115240864129177,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.22519083969465647,
						"acc_norm,none": 0.2703562340966921,
						"acc_norm_stderr,none": 0.011205610942827099,
						"acc_stderr,none": 0.010538641739267836,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2884422110552764,
						"acc_norm,none": 0.2847571189279732,
						"acc_norm_stderr,none": 0.008261609390900714,
						"acc_stderr,none": 0.008293447257027539,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.35267951705147216,
						"acc_stderr,none": 0.004917459532283326,
						"alias": "mc_taco",
						"f1,none": 0.5080489375402447,
						"f1_stderr,none": 0.005485419330756455
					},
					"medmcqa": {
						"acc,none": 0.2749223045661009,
						"acc_norm,none": 0.2749223045661009,
						"acc_norm_stderr,none": 0.006904070961661399,
						"acc_stderr,none": 0.006904070961661399,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2788688138256088,
						"acc_norm,none": 0.2788688138256088,
						"acc_norm_stderr,none": 0.012573713423721657,
						"acc_stderr,none": 0.012573713423721657,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25124626121635096,
						"acc_stderr,none": 0.040123172634924875,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932268,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.03785714465066654,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22264150943396227,
						"acc_stderr,none": 0.025604233470899095,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2916666666666667,
						"acc_stderr,none": 0.038009680605548574,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.15,
						"acc_stderr,none": 0.0358870281282637,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.21,
						"acc_stderr,none": 0.040936018074033256,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.033205564430855705,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.037932811853078084,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.26382978723404255,
						"acc_stderr,none": 0.02880998985410297,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.23684210526315788,
						"acc_stderr,none": 0.03999423879281335,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.27586206896551724,
						"acc_stderr,none": 0.03724563619774632,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.20899470899470898,
						"acc_stderr,none": 0.02094048156533485,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848878,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816507,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.25161290322580643,
						"acc_stderr,none": 0.024685979286239963,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.030315099285617732,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.24242424242424243,
						"acc_stderr,none": 0.033464098810559534,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23737373737373738,
						"acc_stderr,none": 0.0303137105381989,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.23316062176165803,
						"acc_stderr,none": 0.03051611137147601,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23076923076923078,
						"acc_stderr,none": 0.02136202772522272,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.2074074074074074,
						"acc_stderr,none": 0.024720713193952134,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2184873949579832,
						"acc_stderr,none": 0.02684151432295894,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.22752293577981653,
						"acc_stderr,none": 0.017974463578776502,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.027467401804057982,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.03039153369274154,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.2911392405063291,
						"acc_stderr,none": 0.029571601065753374,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.32286995515695066,
						"acc_stderr,none": 0.03138147637575498,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2748091603053435,
						"acc_stderr,none": 0.03915345408847834,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.26057385759829976,
						"acc_stderr,none": 0.03396620709314821,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.24793388429752067,
						"acc_stderr,none": 0.03941897526516302,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3148148148148148,
						"acc_stderr,none": 0.04489931073591311,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2147239263803681,
						"acc_stderr,none": 0.03226219377286774,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.21359223300970873,
						"acc_stderr,none": 0.04058042015646035,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3034188034188034,
						"acc_stderr,none": 0.030118210106942645,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.25798212005108556,
						"acc_stderr,none": 0.015645830188348947,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3063583815028902,
						"acc_stderr,none": 0.024818350129436596,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2581699346405229,
						"acc_stderr,none": 0.025058503316958143,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25555197940135177,
						"acc_stderr,none": 0.04086467458875185,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.22508038585209003,
						"acc_stderr,none": 0.023720088516179034,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.27469135802469136,
						"acc_stderr,none": 0.02483605786829468,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2765957446808511,
						"acc_stderr,none": 0.026684564340460987,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2588005215123859,
						"acc_stderr,none": 0.011186109046564613,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.17647058823529413,
						"acc_stderr,none": 0.02315746830855935,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.01755581809132227,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04265792110940589,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.21224489795918366,
						"acc_stderr,none": 0.026176967197866767,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2430939226519337,
						"acc_stderr,none": 0.03690183735776518,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.26865671641791045,
						"acc_stderr,none": 0.03134328358208954,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24104027909927053,
						"acc_stderr,none": 0.04824039520692752,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.37,
						"acc_stderr,none": 0.04852365870939098,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.27710843373493976,
						"acc_stderr,none": 0.03484331592680589,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.036155076303109344,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.389302088639837,
						"acc_stderr,none": 0.004921908745191761,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.40398698128559807,
						"acc_stderr,none": 0.004948945942126175,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6495098039215687,
						"acc_stderr,none": 0.023650133032612784,
						"alias": "mrpc",
						"f1,none": 0.7740916271721959,
						"f1_stderr,none": 0.018415233852137208
					},
					"multimedqa": {
						"acc,none": 0.2958126330731015,
						"acc_norm,none": 0.2754739830660537,
						"acc_norm_stderr,none": 8.751546615277735e-05,
						"acc_stderr,none": 0.09499139878568526,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5717821782178217,
						"acc_stderr,none": 0.007107406686707522,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7146350654603127,
						"mrr_stderr,none": 0.01029037211320983,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.41196388261851014,
						"r@2_stderr,none": 0.01654473961960943
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6554740424148117,
						"mrr_stderr,none": 0.010354114317480992,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4683972911963883,
						"r@2_stderr,none": 0.01677371055764036
					},
					"openbookqa": {
						"acc,none": 0.306,
						"acc_norm,none": 0.442,
						"acc_norm_stderr,none": 0.02223197069632112,
						"acc_stderr,none": 0.02062956999834541,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4215,
						"acc_stderr,none": 0.01104444950789629,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.387,
						"acc_stderr,none": 0.010893798117218191,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4165,
						"acc_stderr,none": 0.011026090074507977,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003711,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.499,
						"acc_stderr,none": 0.011183113654770175,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48192857142857143,
						"acc_stderr,none": 0.04936705250378216,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.794885745375408,
						"acc_norm,none": 0.8052230685527747,
						"acc_norm_stderr,none": 0.00924000669331772,
						"acc_stderr,none": 0.009420971671017913,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.24263450042698548,
						"acc_norm,none": 0.27940862510674636,
						"acc_norm_stderr,none": 0.003278216147759991,
						"acc_stderr,none": 0.003131858896197657,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.021144791425048843,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7167411129823723,
						"acc_norm,none": 0.6232890725893111,
						"acc_norm_stderr,none": 0.004376227713404006,
						"acc_stderr,none": 0.1530606905089029,
						"alias": "pythia",
						"bits_per_byte,none": 0.6449064054167248,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5636378422407153,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 3.371030447212134,
						"perplexity_stderr,none": 0.06463160820980969,
						"word_perplexity,none": 10.91776141132558,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3900709219858156,
						"acc_norm,none": 0.4574468085106383,
						"acc_norm_stderr,none": 0.050384093205958665,
						"acc_stderr,none": 0.044605867998649226,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.48333333333333334,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.04531634835874828,
						"acc_stderr,none": 0.04580945392704764,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.46875,
						"acc_norm_stderr,none": 0.039575057062617526,
						"acc_stderr,none": 0.03812374340644891,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.36619718309859156,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.02913837502274766,
						"acc_stderr,none": 0.02863791293383347,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5024711696869851,
						"acc_stderr,none": 0.006765327922882503,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4761563195646797,
						"acc_stderr,none": 0.0024838715388372807,
						"alias": "qqp",
						"f1,none": 0.484808679364616,
						"f1_stderr,none": 0.0030249057585503628
					},
					"race": {
						"acc,none": 0.3703349282296651,
						"acc_stderr,none": 0.014945205447391755,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.6209386281588448,
						"acc_stderr,none": 0.029202804623788027,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.944,
						"acc_norm,none": 0.917,
						"acc_norm_stderr,none": 0.00872852720607479,
						"acc_stderr,none": 0.007274401481697053,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6245487364620939,
						"acc_stderr,none": 0.029147775180820408,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.49426605504587157,
						"acc_stderr,none": 0.016940739619904895,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5684794561631511,
						"acc_norm,none": 0.766220133959812,
						"acc_norm_stderr,none": 0.0029923408040942274,
						"acc_stderr,none": 0.0035017799172041577,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5511297460982996,
						"acc_stderr,none": 0.07908027444684382,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.7163461538461539,
						"acc_stderr,none": 0.004511544750177365,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.43660687138948007,
						"acc_stderr,none": 0.004993217691179919,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5001960784313726,
						"acc_stderr,none": 0.004950980035252737,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2825705305262654,
						"acc_stderr,none": 0.0010887149901545557,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3108935128518972,
						"bleu_acc_stderr,none": 0.016203316673559693,
						"bleu_diff,none": -9.354374155744162,
						"bleu_diff_stderr,none": 0.8230553636710657,
						"bleu_max,none": 23.138368130095003,
						"bleu_max_stderr,none": 0.7650589816904417,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766372,
						"rouge1_diff,none": -12.920617255342343,
						"rouge1_diff_stderr,none": 0.896332620078308,
						"rouge1_max,none": 46.5363077764266,
						"rouge1_max_stderr,none": 0.9050743200460277,
						"rouge2_acc,none": 0.19461444308445533,
						"rouge2_acc_stderr,none": 0.01385939820702943,
						"rouge2_diff,none": -15.101502049728007,
						"rouge2_diff_stderr,none": 1.0680310771067492,
						"rouge2_max,none": 29.447400173329665,
						"rouge2_max_stderr,none": 1.0100666515515113,
						"rougeL_acc,none": 0.25091799265605874,
						"rougeL_acc_stderr,none": 0.01517698502770769,
						"rougeL_diff,none": -13.30538274852999,
						"rougeL_diff_stderr,none": 0.9101675355915293,
						"rougeL_max,none": 43.8008067041745,
						"rougeL_max_stderr,none": 0.9007127526702894
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3108935128518972,
						"bleu_acc_stderr,none": 0.016203316673559693,
						"bleu_diff,none": -9.354374155744162,
						"bleu_diff_stderr,none": 0.8230553636710657,
						"bleu_max,none": 23.138368130095003,
						"bleu_max_stderr,none": 0.7650589816904417,
						"rouge1_acc,none": 0.2607099143206854,
						"rouge1_acc_stderr,none": 0.015368841620766372,
						"rouge1_diff,none": -12.920617255342343,
						"rouge1_diff_stderr,none": 0.896332620078308,
						"rouge1_max,none": 46.5363077764266,
						"rouge1_max_stderr,none": 0.9050743200460277,
						"rouge2_acc,none": 0.19461444308445533,
						"rouge2_acc_stderr,none": 0.01385939820702943,
						"rouge2_diff,none": -15.101502049728007,
						"rouge2_diff_stderr,none": 1.0680310771067492,
						"rouge2_max,none": 29.447400173329665,
						"rouge2_max_stderr,none": 1.0100666515515113,
						"rougeL_acc,none": 0.25091799265605874,
						"rougeL_acc_stderr,none": 0.01517698502770769,
						"rougeL_diff,none": -13.30538274852999,
						"rougeL_diff_stderr,none": 0.9101675355915293,
						"rougeL_max,none": 43.8008067041745,
						"rougeL_max_stderr,none": 0.9007127526702894
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22276621787025705,
						"acc_stderr,none": 0.014566506961396745,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.34235356921278304,
						"acc_stderr,none": 0.013269569074086149,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.1279527559055118,
						"exact_match_stderr,none": 0.007412076063525489
					},
					"wic": {
						"acc,none": 0.49843260188087773,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6449064054167248,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5636378422407153,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.91776141132558,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6708760852407262,
						"acc_stderr,none": 0.013206387089091474,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4788732394366197,
						"acc_stderr,none": 0.05970805879899504,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8424908424908425,
						"acc_stderr,none": 0.02208772806150051,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5596363636363636,
						"acc_stderr,none": 0.07164986756889576,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.736,
						"acc_stderr,none": 0.019732885585922098,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843424,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.696,
						"acc_stderr,none": 0.020591649571224925,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3897724230254351,
						"acc_stderr,none": 0.05644927888896788,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617609,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568398,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.46265060240963857,
						"acc_stderr,none": 0.00999407262056142,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495748,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5457831325301205,
						"acc_stderr,none": 0.00997996999168044,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774337,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4927710843373494,
						"acc_stderr,none": 0.010021025361119623,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981033,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.40160642570281124,
						"acc_stderr,none": 0.009826103601507123,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3401606425702811,
						"acc_stderr,none": 0.009496174608136397,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.36224899598393573,
						"acc_stderr,none": 0.009634223618009011,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3538152610441767,
						"acc_stderr,none": 0.00958416277058285,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3562248995983936,
						"acc_stderr,none": 0.009598796305792159,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206102,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5603754286745685,
						"acc_stderr,none": 0.08604949243256223,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47915287888815355,
						"acc_stderr,none": 0.01285593628288127,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6942422236929185,
						"acc_stderr,none": 0.011856480568871265,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163343,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.48643282594308407,
						"acc_stderr,none": 0.012862387586650072,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207787,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.012829804720321695,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.48378557246856385,
						"acc_stderr,none": 0.01286035780505586,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5228325612177366,
						"acc_stderr,none": 0.01285370238487085,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6406353408338848,
						"acc_stderr,none": 0.012347659802101674,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7772533153517645,
						"acc_stderr,none": 0.07015448404595596,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907551,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6152241918665277,
						"acc_stderr,none": 0.015719467393137277,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7376425855513308,
						"acc_stderr,none": 0.02717809523883128,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5492063492063493,
						"acc_stderr,none": 0.02807966006822512,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187162,
						"alias": " - xwinograd_zh"
					}
				}
			},
			"trust_remote_code=True": {
				"confObj": {
					"trust_remote_code": "True"
				},
				"confStr": "trust_remote_code=True",
				"groups": {
					"lambada_multilingual": {
						"acc,none": 0.5126722297690666,
						"acc_stderr,none": 0.07140287801348252,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.66553270480355,
						"perplexity_stderr,none": 10.316726322742879
					},
					"pawsx": {
						"acc,none": 0.48192857142857143,
						"acc_stderr,none": 0.04936705250378216,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5603636363636364,
						"acc_stderr,none": 0.06554478826396092,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3897724230254351,
						"acc_stderr,none": 0.05644927888896788,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5603754286745685,
						"acc_stderr,none": 0.08604949243256223,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7772533153517645,
						"acc_stderr,none": 0.07653601170978505,
						"alias": "xwinograd"
					}
				},
				"results": {
					"lambada_multilingual": {
						"acc,none": 0.5126722297690666,
						"acc_stderr,none": 0.07140287801348252,
						"alias": "lambada_multilingual",
						"perplexity,none": 28.66553270480355,
						"perplexity_stderr,none": 10.316726322742879
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.38230157189986413,
						"acc_stderr,none": 0.006770229062388661,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 51.992350380317696,
						"perplexity_stderr,none": 3.0317507291100503
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7442266640791771,
						"acc_stderr,none": 0.006078442596011089,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 3.370473032065976,
						"perplexity_stderr,none": 0.0646119763158426
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.42712982728507665,
						"acc_stderr,none": 0.006891601045518706,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 39.56866298143678,
						"perplexity_stderr,none": 2.045595111842822
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.5216378808461091,
						"acc_stderr,none": 0.006959451688268363,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 19.11026107700648,
						"perplexity_stderr,none": 0.9213692883412197
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.4880652047351058,
						"acc_stderr,none": 0.006963992915953921,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 29.285916053190796,
						"perplexity_stderr,none": 1.6460582925639256
					},
					"paws_de": {
						"acc,none": 0.4215,
						"acc_stderr,none": 0.01104444950789629,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.387,
						"acc_stderr,none": 0.010893798117218191,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4165,
						"acc_stderr,none": 0.011026090074507977,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.011135708419359796,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.5455,
						"acc_stderr,none": 0.011136735987003711,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.499,
						"acc_stderr,none": 0.011183113654770175,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.48192857142857143,
						"acc_stderr,none": 0.04936705250378216,
						"alias": "pawsx"
					},
					"xcopa": {
						"acc,none": 0.5603636363636364,
						"acc_stderr,none": 0.06554478826396092,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.022378596989230774,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.02238289498648352,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01968468882019472,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.518,
						"acc_stderr,none": 0.02236856511738799,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.566,
						"acc_stderr,none": 0.022187215803029008,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.022288147591176945,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.02226169729227013,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.7,
						"acc_stderr,none": 0.020514426225628053,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3897724230254351,
						"acc_stderr,none": 0.05644927888896788,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617609,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568398,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.46265060240963857,
						"acc_stderr,none": 0.00999407262056142,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3522088353413655,
						"acc_stderr,none": 0.009574259292495748,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5457831325301205,
						"acc_stderr,none": 0.00997996999168044,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4771084337349398,
						"acc_stderr,none": 0.010011563747774337,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.4927710843373494,
						"acc_stderr,none": 0.010021025361119623,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3381526104417671,
						"acc_stderr,none": 0.009482500057981033,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.40160642570281124,
						"acc_stderr,none": 0.009826103601507123,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3401606425702811,
						"acc_stderr,none": 0.009496174608136397,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.36224899598393573,
						"acc_stderr,none": 0.009634223618009011,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3538152610441767,
						"acc_stderr,none": 0.00958416277058285,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3562248995983936,
						"acc_stderr,none": 0.009598796305792159,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206102,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5603754286745685,
						"acc_stderr,none": 0.08604949243256223,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47915287888815355,
						"acc_stderr,none": 0.01285593628288127,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7789543348775645,
						"acc_stderr,none": 0.010678457581809242,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6942422236929185,
						"acc_stderr,none": 0.011856480568871265,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5016545334215751,
						"acc_stderr,none": 0.012867054869163343,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.48643282594308407,
						"acc_stderr,none": 0.012862387586650072,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5552614162806089,
						"acc_stderr,none": 0.012788295970207787,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5380542686962276,
						"acc_stderr,none": 0.012829804720321695,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.48378557246856385,
						"acc_stderr,none": 0.01286035780505586,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5228325612177366,
						"acc_stderr,none": 0.01285370238487085,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6406353408338848,
						"acc_stderr,none": 0.012347659802101674,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7772533153517645,
						"acc_stderr,none": 0.07653601170978505,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.006740838111907551,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6987951807228916,
						"acc_stderr,none": 0.05066394254941721,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.6152241918665277,
						"acc_stderr,none": 0.015719467393137277,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.7376425855513308,
						"acc_stderr,none": 0.02717809523883128,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5492063492063493,
						"acc_stderr,none": 0.02807966006822512,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7876984126984127,
						"acc_stderr,none": 0.018233607978187162,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "tiiuae/falcon-7b"
	},
	"tiiuae/falcon-7b-instruct": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6206313416009019,
						"acc_norm,none": 0.5961104847801578,
						"acc_norm_stderr,none": 0.040572193219819606,
						"acc_stderr,none": 0.05176789584355605,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3321875,
						"acc_stderr,none": 0.015369436623221077,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.1507,
						"acc_stderr,none": 0.1909273416552665,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8253731343283582,
						"acc_stderr,none": 0.13912800717829274,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.23328380386329867,
						"acc_norm,none": 0.23328380386329867,
						"acc_norm_stderr,none": 0.10979282922577566,
						"acc_stderr,none": 0.10979282922577566,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2532377827663616,
						"acc_norm,none": 0.2532377827663616,
						"acc_norm_stderr,none": 0.03670273698689131,
						"acc_stderr,none": 0.03670273698689131,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5965638044126416,
						"likelihood_diff_stderr,none": 0.39855794111199166,
						"pct_stereotype,none": 0.5815444245676804,
						"pct_stereotype_stderr,none": 0.08262012093860319
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03395669291338583,
						"exact_match_stderr,none": 0.004018889171427487
					},
					"glue": {
						"acc,none": 0.5017947579710514,
						"acc_stderr,none": 0.07305656008767054,
						"alias": "glue",
						"f1,none": 0.37366624284399125,
						"f1_stderr,none": 0.000563701805538826,
						"mcc,none": 0.09100919686249848,
						"mcc_stderr,none": 0.0006683228300229339
					},
					"kmmlu": {
						"acc,none": 0.104678024833959,
						"acc_norm,none": 0.104678024833959,
						"acc_norm_stderr,none": 0.06112770934466188,
						"acc_stderr,none": 0.06112770934466188,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.47007235255426444,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.0004971222444889777,
						"acc_stderr,none": 0.046894040168665614,
						"alias": "kobest",
						"f1,none": 0.3594438949445148,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6043081699980594,
						"acc_stderr,none": 0.02046944395374157,
						"alias": "lambada",
						"perplexity,none": 6.312492612187061,
						"perplexity_stderr,none": 0.5958774081287487
					},
					"lambada_cloze": {
						"acc,none": 0.05385212497574229,
						"acc_stderr,none": 0.010427742857434174,
						"alias": "lambada_cloze",
						"perplexity,none": 167.60780560047505,
						"perplexity_stderr,none": 26.12464582063304
					},
					"lambada_multilingual": {
						"acc,none": 0.407995342518921,
						"acc_stderr,none": 0.07214177220176461,
						"alias": "lambada_multilingual",
						"perplexity,none": 68.38185050803729,
						"perplexity_stderr,none": 27.59046837744813
					},
					"mmlu": {
						"acc,none": 0.24611878649764993,
						"acc_stderr,none": 0.041213396126297264,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24442082890541977,
						"acc_stderr,none": 0.03232120859053087,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.26713871902156416,
						"acc_stderr,none": 0.047628671564349224,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2414689632759181,
						"acc_stderr,none": 0.03774267534399638,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.232477006026007,
						"acc_stderr,none": 0.04505887426517013,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.31014904187366926,
						"acc_norm,none": 0.2904287616639172,
						"acc_norm_stderr,none": 0.00012791070647714844,
						"acc_stderr,none": 0.0865310747914122,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5179999999999999,
						"acc_stderr,none": 0.02953391228115302,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7155719704616607,
						"acc_norm,none": 0.6009050432594906,
						"acc_norm_stderr,none": 0.0038007406817995868,
						"acc_stderr,none": 0.13451139334073292,
						"alias": "pythia",
						"bits_per_byte,none": 0.7216991036274446,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6491231112090743,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.163936800564532,
						"perplexity_stderr,none": 0.11815168243198655,
						"word_perplexity,none": 14.512763264192767,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4078014184397163,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.0498878256102602,
						"acc_stderr,none": 0.04205873301092161,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5057402415892982,
						"acc_stderr,none": 0.02638534450339749,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.39030482269902883,
						"acc_stderr,none": 0.05018414663582037,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.408812729498164,
						"bleu_acc_stderr,none": 0.00029618245306179796,
						"bleu_diff,none": -0.5917537366450426,
						"bleu_diff_stderr,none": 0.496514006316269,
						"bleu_max,none": 20.23903584845063,
						"bleu_max_stderr,none": 0.5293705196303082,
						"rouge1_acc,none": 0.40024479804161567,
						"rouge1_acc_stderr,none": 0.0002941775731400047,
						"rouge1_diff,none": -0.9448992510290869,
						"rouge1_diff_stderr,none": 0.9285337510790307,
						"rouge1_max,none": 44.337791988943025,
						"rouge1_max_stderr,none": 0.7770281181507921,
						"rouge2_acc,none": 0.3243574051407589,
						"rouge2_acc_stderr,none": 0.00026856578293028014,
						"rouge2_diff,none": -1.8854947271101368,
						"rouge2_diff_stderr,none": 1.1238072145814166,
						"rouge2_max,none": 28.245646644324392,
						"rouge2_max_stderr,none": 0.961496293506556,
						"rougeL_acc,none": 0.3880048959608323,
						"rougeL_acc_stderr,none": 0.00029100134395987175,
						"rougeL_diff,none": -1.2238854600226043,
						"rougeL_diff_stderr,none": 0.9303941677540448,
						"rougeL_max,none": 41.264123969153935,
						"rougeL_max_stderr,none": 0.7797666563366418
					},
					"xcopa": {
						"acc,none": 0.5367272727272727,
						"acc_stderr,none": 0.05343023789696368,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3804016064257028,
						"acc_stderr,none": 0.05038205588716881,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5343842127429156,
						"acc_stderr,none": 0.08790474490100891,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7370195549561699,
						"acc_stderr,none": 0.08875586095747369,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6206313416009019,
						"acc_norm,none": 0.5961104847801578,
						"acc_norm_stderr,none": 0.040572193219819606,
						"acc_stderr,none": 0.05176789584355605,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3321875,
						"acc_stderr,none": 0.015369436623221077,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.323,
						"acc_stderr,none": 0.014794927843348639,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.327,
						"acc_stderr,none": 0.014842213153411242,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3441666666666667,
						"acc_stderr,none": 0.013720551062295756,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.40273037542662116,
						"acc_norm,none": 0.4283276450511945,
						"acc_norm_stderr,none": 0.014460496367599033,
						"acc_stderr,none": 0.014332236306790133,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7281144781144782,
						"acc_norm,none": 0.6788720538720538,
						"acc_norm_stderr,none": 0.009580787536986797,
						"acc_stderr,none": 0.009129795867310489,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.1507,
						"acc_stderr,none": 0.1909273416552665,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0235,
						"acc_stderr,none": 0.0033881580257424785,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.3865,
						"acc_stderr,none": 0.010891197550868486,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.108,
						"acc_stderr,none": 0.006942052725816977,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.7195,
						"acc_stderr,none": 0.010047903023894477,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0305,
						"acc_stderr,none": 0.003846072169833613,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.2055,
						"acc_stderr,none": 0.009037461637895075,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.029,
						"acc_stderr,none": 0.003753204400460514,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.004,
						"acc_stderr,none": 0.0014117352790976897,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.011279826464208243,
						"acc_stderr,none": 0.0022001215223643937,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8253731343283582,
						"acc_stderr,none": 0.13912800717829274,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523743,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.996,
						"acc_stderr,none": 0.001996994739098729,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767676,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763933,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345703,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.689,
						"acc_stderr,none": 0.014645596385722694,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264374,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.882,
						"acc_stderr,none": 0.010206869264381791,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.001730316154346935,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.005219506034410038,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.938,
						"acc_stderr,none": 0.007629823996280307,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.941,
						"acc_stderr,none": 0.00745483565040673,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.927,
						"acc_stderr,none": 0.00823035471524406,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942307,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333443,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.00512808904927529,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409275,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.737,
						"acc_stderr,none": 0.013929286594259729,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.717,
						"acc_stderr,none": 0.014251810906481747,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.807,
						"acc_stderr,none": 0.012486268734370143,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523706,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890122,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.0046408552592747026,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.467,
						"acc_stderr,none": 0.015784807891138782,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.009820001651345719,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425679,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.015029633724408947,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.01284137457209693,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.0091443763931511,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.012310790208412793,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904605,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.00997775303139723,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.758,
						"acc_stderr,none": 0.013550631705555954,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.015818793703510886,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.648,
						"acc_stderr,none": 0.015110404505648666,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.681,
						"acc_stderr,none": 0.01474640486547349,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045065,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.792,
						"acc_stderr,none": 0.012841374572096938,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.885,
						"acc_stderr,none": 0.010093407594904624,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.91,
						"acc_stderr,none": 0.009054390204866439,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.012510816141264368,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.999,
						"acc_stderr,none": 0.0010000000000000018,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.0069604200625714135,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448847,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271306,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.01420569610409151,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.0157926694516289,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.948,
						"acc_stderr,none": 0.007024624213817148,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713329,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.98,
						"acc_stderr,none": 0.004429403980178346,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274531,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.483,
						"acc_stderr,none": 0.015810153729833427,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524282,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281581,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.01489959724281148,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.843,
						"acc_stderr,none": 0.011510146979230185,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.01129723982340928,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160336,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.919,
						"acc_stderr,none": 0.008632121032139973,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792976,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.945,
						"acc_stderr,none": 0.007212976294639244,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.954,
						"acc_stderr,none": 0.006627814717380711,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.436,
						"acc_stderr,none": 0.015689173023144057,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.358,
						"acc_stderr,none": 0.015167928865407557,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7097859327217125,
						"acc_stderr,none": 0.007938079855173705,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.5357142857142857,
						"acc_stderr,none": 0.06724777654937658,
						"alias": "cb",
						"f1,none": 0.367053620784964,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.23328380386329867,
						"acc_norm,none": 0.23328380386329867,
						"acc_norm_stderr,none": 0.10979282922577566,
						"acc_stderr,none": 0.10979282922577566,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828227,
						"acc_stderr,none": 0.05589005688828227,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.42424242424242425,
						"acc_norm,none": 0.42424242424242425,
						"acc_norm_stderr,none": 0.08736789844447573,
						"acc_stderr,none": 0.08736789844447573,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757577,
						"acc_stderr,none": 0.07575757575757577,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.1702127659574468,
						"acc_norm,none": 0.1702127659574468,
						"acc_norm_stderr,none": 0.055411578656325386,
						"acc_stderr,none": 0.055411578656325386,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129649,
						"acc_stderr,none": 0.06861056852129649,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736842,
						"acc_stderr,none": 0.05263157894736842,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.2903225806451613,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.08287246824945245,
						"acc_stderr,none": 0.08287246824945245,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.07102933373079214,
						"acc_stderr,none": 0.07102933373079214,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2532377827663616,
						"acc_norm,none": 0.2532377827663616,
						"acc_norm_stderr,none": 0.03670273698689131,
						"acc_stderr,none": 0.03670273698689131,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.03279317792268949,
						"acc_stderr,none": 0.03279317792268949,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018759,
						"acc_stderr,none": 0.03489370652018759,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24401913875598086,
						"acc_norm,none": 0.24401913875598086,
						"acc_norm_stderr,none": 0.029780753228706106,
						"acc_stderr,none": 0.029780753228706106,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018761,
						"acc_stderr,none": 0.03489370652018761,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2748091603053435,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.03915345408847836,
						"acc_stderr,none": 0.03915345408847836,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.2426470588235294,
						"acc_norm,none": 0.2426470588235294,
						"acc_norm_stderr,none": 0.03689519326996807,
						"acc_stderr,none": 0.03689519326996807,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.04269291915728108,
						"acc_stderr,none": 0.04269291915728108,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.024539600216850282,
						"acc_stderr,none": 0.024539600216850282,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693257,
						"acc_stderr,none": 0.030778554678693257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.24022346368715083,
						"acc_norm,none": 0.24022346368715083,
						"acc_norm_stderr,none": 0.032021424638044936,
						"acc_stderr,none": 0.032021424638044936,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460295,
						"acc_stderr,none": 0.028458820991460295,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.04198857662371224,
						"acc_stderr,none": 0.04198857662371224,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.33962264150943394,
						"acc_norm,none": 0.33962264150943394,
						"acc_norm_stderr,none": 0.046216787599682646,
						"acc_stderr,none": 0.046216787599682646,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.23148148148148148,
						"acc_norm,none": 0.23148148148148148,
						"acc_norm_stderr,none": 0.04077494709252627,
						"acc_stderr,none": 0.04077494709252627,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.0425201622376331,
						"acc_stderr,none": 0.0425201622376331,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2454212454212454,
						"acc_norm,none": 0.2454212454212454,
						"acc_norm_stderr,none": 0.02609299388422865,
						"acc_stderr,none": 0.02609299388422865,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.031145570659486782,
						"acc_stderr,none": 0.031145570659486782,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23391812865497075,
						"acc_norm,none": 0.23391812865497075,
						"acc_norm_stderr,none": 0.032467217651178264,
						"acc_stderr,none": 0.032467217651178264,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.035589261576067566,
						"acc_stderr,none": 0.035589261576067566,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.036948460554439046,
						"acc_stderr,none": 0.036948460554439046,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.24539877300613497,
						"acc_norm,none": 0.24539877300613497,
						"acc_norm_stderr,none": 0.03380939813943354,
						"acc_stderr,none": 0.03380939813943354,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.02827139981698855,
						"acc_stderr,none": 0.02827139981698855,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2474747474747475,
						"acc_norm,none": 0.2474747474747475,
						"acc_norm_stderr,none": 0.03074630074212451,
						"acc_stderr,none": 0.03074630074212451,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.029344572500634346,
						"acc_stderr,none": 0.029344572500634346,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2782608695652174,
						"acc_norm,none": 0.2782608695652174,
						"acc_norm_stderr,none": 0.029614094221633722,
						"acc_stderr,none": 0.029614094221633722,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23776223776223776,
						"acc_norm,none": 0.23776223776223776,
						"acc_norm_stderr,none": 0.03572502141815571,
						"acc_stderr,none": 0.03572502141815571,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.032976929254344596,
						"acc_stderr,none": 0.032976929254344596,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2483221476510067,
						"acc_norm,none": 0.2483221476510067,
						"acc_norm_stderr,none": 0.035513440416974296,
						"acc_stderr,none": 0.035513440416974296,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.03820699814849796,
						"acc_stderr,none": 0.03820699814849796,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2033898305084746,
						"acc_norm,none": 0.2033898305084746,
						"acc_norm_stderr,none": 0.03721299370466347,
						"acc_stderr,none": 0.03721299370466347,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03391617237346009,
						"acc_stderr,none": 0.03391617237346009,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.21678321678321677,
						"acc_norm,none": 0.21678321678321677,
						"acc_norm_stderr,none": 0.03457877857147844,
						"acc_stderr,none": 0.03457877857147844,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.039325376803928704,
						"acc_stderr,none": 0.039325376803928704,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.23783783783783785,
						"acc_norm,none": 0.23783783783783785,
						"acc_norm_stderr,none": 0.031387393683304794,
						"acc_stderr,none": 0.031387393683304794,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2616822429906542,
						"acc_norm,none": 0.2616822429906542,
						"acc_norm_stderr,none": 0.030117504361850386,
						"acc_stderr,none": 0.030117504361850386,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.22950819672131148,
						"acc_norm,none": 0.22950819672131148,
						"acc_norm_stderr,none": 0.03822877895195425,
						"acc_stderr,none": 0.03822877895195425,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.23333333333333334,
						"acc_norm,none": 0.23333333333333334,
						"acc_norm_stderr,none": 0.029256242035383875,
						"acc_stderr,none": 0.029256242035383875,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25555555555555554,
						"acc_norm,none": 0.25555555555555554,
						"acc_norm_stderr,none": 0.032601103040276455,
						"acc_stderr,none": 0.032601103040276455,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.2672413793103448,
						"acc_norm,none": 0.2672413793103448,
						"acc_norm_stderr,none": 0.041265147363240995,
						"acc_stderr,none": 0.041265147363240995,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2620689655172414,
						"acc_norm,none": 0.2620689655172414,
						"acc_norm_stderr,none": 0.036646663372252565,
						"acc_stderr,none": 0.036646663372252565,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055042,
						"acc_stderr,none": 0.04232473532055042,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945215,
						"acc_stderr,none": 0.029927771242945215,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2526595744680851,
						"acc_norm,none": 0.2526595744680851,
						"acc_norm_stderr,none": 0.022439412582786405,
						"acc_stderr,none": 0.022439412582786405,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171145,
						"acc_stderr,none": 0.028324514684171145,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.033291151121447815,
						"acc_stderr,none": 0.033291151121447815,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174023,
						"acc_stderr,none": 0.03749850709174023,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.02895216745089081,
						"acc_stderr,none": 0.02895216745089081,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.031629303956979486,
						"acc_stderr,none": 0.031629303956979486,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.03334150198101962,
						"acc_stderr,none": 0.03334150198101962,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2546583850931677,
						"acc_norm,none": 0.2546583850931677,
						"acc_norm_stderr,none": 0.03444265995779324,
						"acc_stderr,none": 0.03444265995779324,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25625,
						"acc_norm,none": 0.25625,
						"acc_norm_stderr,none": 0.03462157845865141,
						"acc_stderr,none": 0.03462157845865141,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.09100919686249848,
						"mcc_stderr,none": 0.02585194054656118
					},
					"copa": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.03487350880197771,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.5965638044126416,
						"likelihood_diff_stderr,none": 0.39855794111199166,
						"pct_stereotype,none": 0.5815444245676804,
						"pct_stereotype_stderr,none": 0.08262012093860319
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.7528324388789507,
						"likelihood_diff_stderr,none": 0.08793942918301238,
						"pct_stereotype,none": 0.6404293381037567,
						"pct_stereotype_stderr,none": 0.011721703372467203
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.289835164835165,
						"likelihood_diff_stderr,none": 0.37871017241684846,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.048650425541051985
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.420454545454546,
						"likelihood_diff_stderr,none": 1.676735255102152,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 5.611538461538461,
						"likelihood_diff_stderr,none": 0.5765090661926897,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.98515625,
						"likelihood_diff_stderr,none": 0.17191350714799736,
						"pct_stereotype,none": 0.659375,
						"pct_stereotype_stderr,none": 0.0265343929755315
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.396412037037037,
						"likelihood_diff_stderr,none": 0.2272194271004726,
						"pct_stereotype,none": 0.6111111111111112,
						"pct_stereotype_stderr,none": 0.03324708911809117
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.654513888888889,
						"likelihood_diff_stderr,none": 0.334984294117765,
						"pct_stereotype,none": 0.7916666666666666,
						"pct_stereotype_stderr,none": 0.04819715314419525
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5789862204724407,
						"likelihood_diff_stderr,none": 0.15172064557035136,
						"pct_stereotype,none": 0.5039370078740157,
						"pct_stereotype_stderr,none": 0.02220509119300217
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.04054054054054,
						"likelihood_diff_stderr,none": 0.38621690611919407,
						"pct_stereotype,none": 0.7387387387387387,
						"pct_stereotype_stderr,none": 0.041887708614323976
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.68010752688172,
						"likelihood_diff_stderr,none": 0.5003558456652057,
						"pct_stereotype,none": 0.8494623655913979,
						"pct_stereotype_stderr,none": 0.03728212869390004
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.3256578947368425,
						"likelihood_diff_stderr,none": 0.25366071121131345,
						"pct_stereotype,none": 0.7263157894736842,
						"pct_stereotype_stderr,none": 0.032430729061898395
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.442046809779368,
						"likelihood_diff_stderr,none": 0.07825890552275785,
						"pct_stereotype,none": 0.5223613595706619,
						"pct_stereotype_stderr,none": 0.012201079063310654
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.422222222222222,
						"likelihood_diff_stderr,none": 0.3153789222723632,
						"pct_stereotype,none": 0.6,
						"pct_stereotype_stderr,none": 0.05192907868894985
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 2.6346153846153846,
						"likelihood_diff_stderr,none": 0.7497945149143601,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.12162606385262997
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.46969696969697,
						"likelihood_diff_stderr,none": 0.36413015230551904,
						"pct_stereotype,none": 0.7424242424242424,
						"pct_stereotype_stderr,none": 0.054240275510565296
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.086448598130841,
						"likelihood_diff_stderr,none": 0.1459694586556403,
						"pct_stereotype,none": 0.5545171339563862,
						"pct_stereotype_stderr,none": 0.0277842065133898
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.642292490118577,
						"likelihood_diff_stderr,none": 0.20411943761903553,
						"pct_stereotype,none": 0.3715415019762846,
						"pct_stereotype_stderr,none": 0.030439793183749016
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.4375,
						"likelihood_diff_stderr,none": 0.40520473147921854,
						"pct_stereotype,none": 0.6527777777777778,
						"pct_stereotype_stderr,none": 0.056501146768529645
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.435326086956522,
						"likelihood_diff_stderr,none": 0.1526212888039778,
						"pct_stereotype,none": 0.4369565217391304,
						"pct_stereotype_stderr,none": 0.023151745316873383
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4456521739130435,
						"likelihood_diff_stderr,none": 0.302840333238616,
						"pct_stereotype,none": 0.5478260869565217,
						"pct_stereotype_stderr,none": 0.046614569799583463
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 2.909340659340659,
						"likelihood_diff_stderr,none": 0.275542435485542,
						"pct_stereotype,none": 0.7362637362637363,
						"pct_stereotype_stderr,none": 0.04644942852497395
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 3.730548469387755,
						"likelihood_diff_stderr,none": 0.2879345848478076,
						"pct_stereotype,none": 0.5714285714285714,
						"pct_stereotype_stderr,none": 0.035438495596916704
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.03395669291338583,
						"exact_match_stderr,none": 0.004018889171427487
					},
					"glue": {
						"acc,none": 0.5017947579710514,
						"acc_stderr,none": 0.07305656008767054,
						"alias": "glue",
						"f1,none": 0.37366624284399125,
						"f1_stderr,none": 0.000563701805538826,
						"mcc,none": 0.09100919686249848,
						"mcc_stderr,none": 0.0006683228300229339
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.039423805913570885,
						"exact_match_stderr,get-answer": 0.005360280030342448
					},
					"hellaswag": {
						"acc,none": 0.5166301533559052,
						"acc_norm,none": 0.6977693686516631,
						"acc_norm_stderr,none": 0.004582861219020882,
						"acc_stderr,none": 0.004987020679861267,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.104678024833959,
						"acc_norm,none": 0.104678024833959,
						"acc_norm_stderr,none": 0.06112770934466188,
						"acc_stderr,none": 0.06112770934466188,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.03861229196653697,
						"acc_stderr,none": 0.03861229196653697,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.091,
						"acc_norm,none": 0.091,
						"acc_norm_stderr,none": 0.009099549538400233,
						"acc_stderr,none": 0.009099549538400233,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753651,
						"acc_stderr,none": 0.008583336977753651,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.191,
						"acc_norm,none": 0.191,
						"acc_norm_stderr,none": 0.012436787112179479,
						"acc_stderr,none": 0.012436787112179479,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024394,
						"acc_stderr,none": 0.012749374359024394,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.015697473824603857,
						"acc_stderr,none": 0.015697473824603857,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.02,
						"acc_norm,none": 0.02,
						"acc_norm_stderr,none": 0.004429403980178348,
						"acc_stderr,none": 0.004429403980178348,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.05,
						"acc_norm,none": 0.05,
						"acc_norm_stderr,none": 0.006895472974897892,
						"acc_stderr,none": 0.006895472974897892,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.027,
						"acc_norm,none": 0.027,
						"acc_norm_stderr,none": 0.005128089049275286,
						"acc_stderr,none": 0.005128089049275286,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.037,
						"acc_norm,none": 0.037,
						"acc_norm_stderr,none": 0.005972157622389647,
						"acc_stderr,none": 0.005972157622389647,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.04063619567656727,
						"acc_stderr,none": 0.04063619567656727,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.024,
						"acc_norm,none": 0.024,
						"acc_norm_stderr,none": 0.00484225644172708,
						"acc_stderr,none": 0.00484225644172708,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.035,
						"acc_norm,none": 0.035,
						"acc_norm_stderr,none": 0.005814534272734956,
						"acc_stderr,none": 0.005814534272734956,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662146,
						"acc_stderr,none": 0.012207580637662146,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426137,
						"acc_stderr,none": 0.006125072776426137,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.010878848714333316,
						"acc_stderr,none": 0.010878848714333316,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.117,
						"acc_norm,none": 0.117,
						"acc_norm_stderr,none": 0.010169287802713329,
						"acc_stderr,none": 0.010169287802713329,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.084,
						"acc_norm,none": 0.084,
						"acc_norm_stderr,none": 0.008776162089491111,
						"acc_stderr,none": 0.008776162089491111,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.073,
						"acc_norm,none": 0.073,
						"acc_norm_stderr,none": 0.008230354715244066,
						"acc_stderr,none": 0.008230354715244066,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.040201512610368445,
						"acc_stderr,none": 0.040201512610368445,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.028,
						"acc_norm,none": 0.028,
						"acc_norm_stderr,none": 0.0052195060344100595,
						"acc_stderr,none": 0.0052195060344100595,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.048,
						"acc_norm,none": 0.048,
						"acc_norm_stderr,none": 0.006763264133666661,
						"acc_stderr,none": 0.006763264133666661,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557422,
						"acc_stderr,none": 0.007572076091557422,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.013314551335935938,
						"acc_stderr,none": 0.013314551335935938,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.082,
						"acc_norm,none": 0.082,
						"acc_norm_stderr,none": 0.00868051561552372,
						"acc_stderr,none": 0.00868051561552372,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.01220758063766218,
						"acc_stderr,none": 0.01220758063766218,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.13666666666666666,
						"acc_norm,none": 0.13666666666666666,
						"acc_norm_stderr,none": 0.014034829611310272,
						"acc_stderr,none": 0.014034829611310272,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.00982000165134571,
						"acc_stderr,none": 0.00982000165134571,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.009820001651345708,
						"acc_stderr,none": 0.009820001651345708,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.065,
						"acc_norm,none": 0.065,
						"acc_norm_stderr,none": 0.00779973306183203,
						"acc_stderr,none": 0.00779973306183203,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.00896305396259208,
						"acc_stderr,none": 0.00896305396259208,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.21666666666666667,
						"acc_norm,none": 0.21666666666666667,
						"acc_norm_stderr,none": 0.02382504669967184,
						"acc_stderr,none": 0.02382504669967184,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920828,
						"acc_stderr,none": 0.013512312258920828,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.045,
						"acc_norm,none": 0.045,
						"acc_norm_stderr,none": 0.006558812241406107,
						"acc_stderr,none": 0.006558812241406107,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.115,
						"acc_norm,none": 0.115,
						"acc_norm_stderr,none": 0.010093407594904603,
						"acc_stderr,none": 0.010093407594904603,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.02808592343999728,
						"acc_stderr,none": 0.02808592343999728,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.139,
						"acc_norm,none": 0.139,
						"acc_norm_stderr,none": 0.01094526376104296,
						"acc_stderr,none": 0.01094526376104296,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341676,
						"acc_stderr,none": 0.011328165223341676,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.039,
						"acc_norm,none": 0.039,
						"acc_norm_stderr,none": 0.006125072776426138,
						"acc_stderr,none": 0.006125072776426138,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.47007235255426444,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.0004971222444889777,
						"acc_stderr,none": 0.046894040168665614,
						"alias": "kobest",
						"f1,none": 0.3594438949445148,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.475,
						"acc_stderr,none": 0.015799513429996016,
						"alias": " - kobest_copa",
						"f1,none": 0.47402747680461177,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.304,
						"acc_norm,none": 0.456,
						"acc_norm_stderr,none": 0.022296238348407063,
						"acc_stderr,none": 0.020591649571224932,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.30223941634495843,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.49622166246851385,
						"acc_stderr,none": 0.025125227983562776,
						"alias": " - kobest_sentineg",
						"f1,none": 0.33164983164983164,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6043081699980594,
						"acc_stderr,none": 0.02046944395374157,
						"alias": "lambada",
						"perplexity,none": 6.312492612187061,
						"perplexity_stderr,none": 0.5958774081287487
					},
					"lambada_cloze": {
						"acc,none": 0.05385212497574229,
						"acc_stderr,none": 0.010427742857434174,
						"alias": "lambada_cloze",
						"perplexity,none": 167.60780560047505,
						"perplexity_stderr,none": 26.12464582063304
					},
					"lambada_multilingual": {
						"acc,none": 0.407995342518921,
						"acc_stderr,none": 0.07214177220176461,
						"alias": "lambada_multilingual",
						"perplexity,none": 68.38185050803729,
						"perplexity_stderr,none": 27.59046837744813
					},
					"lambada_openai": {
						"acc,none": 0.6425383271880458,
						"acc_stderr,none": 0.006676922474252462,
						"alias": " - lambada_openai",
						"perplexity,none": 5.163936800564532,
						"perplexity_stderr,none": 0.11815168243198655
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.07374345041723268,
						"acc_stderr,none": 0.003641157339863476,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 116.40378584253361,
						"perplexity_stderr,none": 3.3013778970366108
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.28662914806908596,
						"acc_stderr,none": 0.0062998459440006494,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 129.51797585932053,
						"perplexity_stderr,none": 7.462176305091401
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6437026974577916,
						"acc_stderr,none": 0.006672076306559641,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 5.161489882730245,
						"perplexity_stderr,none": 0.1180789147110526
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.309528430040753,
						"acc_stderr,none": 0.006440732259116662,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 101.76523964681523,
						"perplexity_stderr,none": 5.487720561651144
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4296526295361925,
						"acc_stderr,none": 0.006896687243014184,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 37.29005100308671,
						"perplexity_stderr,none": 1.8677912540341028
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.37046380749078206,
						"acc_stderr,none": 0.006728144610304279,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 68.17449614823377,
						"perplexity_stderr,none": 3.900414464675555
					},
					"lambada_standard": {
						"acc,none": 0.5656898893848243,
						"acc_stderr,none": 0.006905597334082778,
						"alias": " - lambada_standard",
						"perplexity,none": 7.463144127936145,
						"perplexity_stderr,none": 0.18476224309462724
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.03396079953425189,
						"acc_stderr,none": 0.0025234714805461474,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 218.8118253584165,
						"perplexity_stderr,none": 6.5610475674042705
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23791348600508905,
						"exact_match_stderr,get-answer": 0.010742950531023875
					},
					"logiqa": {
						"acc,none": 0.2350230414746544,
						"acc_norm,none": 0.29185867895545314,
						"acc_norm_stderr,none": 0.017831570553971922,
						"acc_stderr,none": 0.01663116682389096,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23791348600508905,
						"acc_norm,none": 0.2748091603053435,
						"acc_norm_stderr,none": 0.011262989126884547,
						"acc_stderr,none": 0.010742950531023854,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2901172529313233,
						"acc_norm,none": 0.28509212730318256,
						"acc_norm_stderr,none": 0.008264531564961748,
						"acc_stderr,none": 0.008307697593432431,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.34494810421520866,
						"acc_stderr,none": 0.004892217269483982,
						"alias": "mc_taco",
						"f1,none": 0.5079163020128888,
						"f1_stderr,none": 0.005457892127483224
					},
					"medmcqa": {
						"acc,none": 0.301458283528568,
						"acc_norm,none": 0.301458283528568,
						"acc_norm_stderr,none": 0.007096068027380243,
						"acc_stderr,none": 0.007096068027380243,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2655145326001571,
						"acc_norm,none": 0.2655145326001571,
						"acc_norm_stderr,none": 0.012382039817647825,
						"acc_stderr,none": 0.012382039817647825,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.24611878649764993,
						"acc_stderr,none": 0.041213396126297264,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.22962962962962963,
						"acc_stderr,none": 0.036333844140734636,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.20394736842105263,
						"acc_stderr,none": 0.0327900040631005,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.049236596391733084,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.23018867924528302,
						"acc_stderr,none": 0.02590789712240817,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2708333333333333,
						"acc_stderr,none": 0.03716177437566016,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.0326926380614177,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.24509803921568626,
						"acc_stderr,none": 0.04280105837364395,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.28936170212765955,
						"acc_stderr,none": 0.029644006577009618,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2631578947368421,
						"acc_stderr,none": 0.04142439719489362,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2206896551724138,
						"acc_stderr,none": 0.03455930201924813,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.21957671957671956,
						"acc_stderr,none": 0.02132001859977034,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2698412698412698,
						"acc_stderr,none": 0.039701582732351734,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.22903225806451613,
						"acc_stderr,none": 0.023904914311782655,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2019704433497537,
						"acc_stderr,none": 0.028247350122180267,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.24848484848484848,
						"acc_stderr,none": 0.03374402644139404,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.23232323232323232,
						"acc_stderr,none": 0.030088629490217483,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.18652849740932642,
						"acc_stderr,none": 0.028112091210117467,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.23333333333333334,
						"acc_stderr,none": 0.021444547301560483,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.24444444444444444,
						"acc_stderr,none": 0.02620276653465215,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.22268907563025211,
						"acc_stderr,none": 0.02702543349888237,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2185430463576159,
						"acc_stderr,none": 0.03374235550425694,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.23302752293577983,
						"acc_stderr,none": 0.018125669180861476,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.025416428388767478,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.030778554678693275,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.29957805907172996,
						"acc_stderr,none": 0.029818024749753095,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.33183856502242154,
						"acc_stderr,none": 0.03160295143776679,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.29770992366412213,
						"acc_stderr,none": 0.040103589424622034,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24442082890541977,
						"acc_stderr,none": 0.03232120859053087,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2809917355371901,
						"acc_stderr,none": 0.04103203830514512,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.043300437496507437,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2085889570552147,
						"acc_stderr,none": 0.031921934489347235,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.24271844660194175,
						"acc_stderr,none": 0.04245022486384493,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.2948717948717949,
						"acc_stderr,none": 0.02987257770889117,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.28607918263090676,
						"acc_stderr,none": 0.016160871405127532,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2543352601156069,
						"acc_stderr,none": 0.02344582627654555,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.014242630070574885,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.024288619466046095,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.26713871902156416,
						"acc_stderr,none": 0.047628671564349224,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.19292604501607716,
						"acc_stderr,none": 0.022411516780911363,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.023788583551658533,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2553191489361702,
						"acc_stderr,none": 0.026011992930902013,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.23533246414602346,
						"acc_stderr,none": 0.010834432543912215,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.1948529411764706,
						"acc_stderr,none": 0.024060599423487424,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25980392156862747,
						"acc_stderr,none": 0.017740899509177795,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.33636363636363636,
						"acc_stderr,none": 0.04525393596302506,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.19591836734693877,
						"acc_stderr,none": 0.025409301953225678,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2414689632759181,
						"acc_stderr,none": 0.03774267534399638,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23880597014925373,
						"acc_stderr,none": 0.030147775935409217,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.232477006026007,
						"acc_stderr,none": 0.04505887426517013,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.29518072289156627,
						"acc_stderr,none": 0.0355092018568963,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.30994152046783624,
						"acc_stderr,none": 0.035469769593931624,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.39113601630157924,
						"acc_stderr,none": 0.004926074999684564,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3846623270951994,
						"acc_stderr,none": 0.004906792939109787,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.5490196078431373,
						"acc_stderr,none": 0.024664683843663434,
						"alias": "mrpc",
						"f1,none": 0.5982532751091703,
						"f1_stderr,none": 0.02729146916790877
					},
					"multimedqa": {
						"acc,none": 0.31014904187366926,
						"acc_norm,none": 0.2904287616639172,
						"acc_norm_stderr,none": 0.00012791070647714844,
						"acc_stderr,none": 0.0865310747914122,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5717821782178217,
						"acc_stderr,none": 0.007107406686707522,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7016553816219485,
						"mrr_stderr,none": 0.010313316129485398,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4164785553047404,
						"r@2_stderr,none": 0.016571167127661955
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6634687753415269,
						"mrr_stderr,none": 0.010516925299234132,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4435665914221219,
						"r@2_stderr,none": 0.016699919496280195
					},
					"openbookqa": {
						"acc,none": 0.3,
						"acc_norm,none": 0.412,
						"acc_norm_stderr,none": 0.02203367799374086,
						"acc_stderr,none": 0.02051442622562805,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.45,
						"acc_stderr,none": 0.011127079848413737,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.5065,
						"acc_stderr,none": 0.0111821910061423,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.471,
						"acc_stderr,none": 0.011164310140373722,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.011131484850525779,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456285,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5405,
						"acc_stderr,none": 0.011146389370464352,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5179999999999999,
						"acc_stderr,none": 0.02953391228115302,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7840043525571273,
						"acc_norm,none": 0.7818280739934712,
						"acc_norm_stderr,none": 0.009636081958374383,
						"acc_stderr,none": 0.009601236303553553,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.310898804440649,
						"acc_norm,none": 0.3024658411614005,
						"acc_norm_stderr,none": 0.0033557844621478214,
						"acc_stderr,none": 0.003381615137984905,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.021513662527582397,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7155719704616607,
						"acc_norm,none": 0.6009050432594906,
						"acc_norm_stderr,none": 0.0038007406817995868,
						"acc_stderr,none": 0.13451139334073292,
						"alias": "pythia",
						"bits_per_byte,none": 0.7216991036274446,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6491231112090743,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 5.163936800564532,
						"perplexity_stderr,none": 0.11815168243198655,
						"word_perplexity,none": 14.512763264192767,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4078014184397163,
						"acc_norm,none": 0.44680851063829785,
						"acc_norm_stderr,none": 0.0498878256102602,
						"acc_stderr,none": 0.04205873301092161,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5666666666666667,
						"acc_norm_stderr,none": 0.04542567625794981,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.4125,
						"acc_norm_stderr,none": 0.03904067786683381,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.4119718309859155,
						"acc_norm,none": 0.4154929577464789,
						"acc_norm_stderr,none": 0.02929432462367856,
						"acc_stderr,none": 0.02925766134209262,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5041186161449753,
						"acc_stderr,none": 0.006765181024578747,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5461538461538461,
						"acc_stderr,none": 0.002476083739902788,
						"alias": "qqp",
						"f1,none": 0.370769178011728,
						"f1_stderr,none": 0.003616970893250515
					},
					"race": {
						"acc,none": 0.3712918660287081,
						"acc_stderr,none": 0.014953126515089406,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.6028880866425993,
						"acc_stderr,none": 0.02945237137834682,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.925,
						"acc_norm,none": 0.882,
						"acc_norm_stderr,none": 0.010206869264381796,
						"acc_stderr,none": 0.008333333333333361,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.6028880866425993,
						"acc_stderr,none": 0.02945237137834682,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8853211009174312,
						"acc_stderr,none": 0.010796502452107722,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5328401479556133,
						"acc_norm,none": 0.7313306008197541,
						"acc_norm_stderr,none": 0.003133983438560071,
						"acc_stderr,none": 0.003527458786823168,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5057402415892982,
						"acc_stderr,none": 0.02638534450339749,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5509815705128205,
						"acc_stderr,none": 0.0049781741348531834,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.44714705584270803,
						"acc_stderr,none": 0.005005638076975459,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5181372549019608,
						"acc_stderr,none": 0.004947721994519879,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.39030482269902883,
						"acc_stderr,none": 0.05018414663582037,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.408812729498164,
						"bleu_acc_stderr,none": 0.00029618245306179796,
						"bleu_diff,none": -0.5917537366450426,
						"bleu_diff_stderr,none": 0.496514006316269,
						"bleu_max,none": 20.23903584845063,
						"bleu_max_stderr,none": 0.5293705196303082,
						"rouge1_acc,none": 0.40024479804161567,
						"rouge1_acc_stderr,none": 0.0002941775731400047,
						"rouge1_diff,none": -0.9448992510290869,
						"rouge1_diff_stderr,none": 0.9285337510790307,
						"rouge1_max,none": 44.337791988943025,
						"rouge1_max_stderr,none": 0.7770281181507921,
						"rouge2_acc,none": 0.3243574051407589,
						"rouge2_acc_stderr,none": 0.00026856578293028014,
						"rouge2_diff,none": -1.8854947271101368,
						"rouge2_diff_stderr,none": 1.1238072145814166,
						"rouge2_max,none": 28.245646644324392,
						"rouge2_max_stderr,none": 0.961496293506556,
						"rougeL_acc,none": 0.3880048959608323,
						"rougeL_acc_stderr,none": 0.00029100134395987175,
						"rougeL_diff,none": -1.2238854600226043,
						"rougeL_diff_stderr,none": 0.9303941677540448,
						"rougeL_max,none": 41.264123969153935,
						"rougeL_max_stderr,none": 0.7797666563366418
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.408812729498164,
						"bleu_acc_stderr,none": 0.017209952151641734,
						"bleu_diff,none": -0.5917537366450426,
						"bleu_diff_stderr,none": 0.7046374999361509,
						"bleu_max,none": 20.23903584845063,
						"bleu_max_stderr,none": 0.7275785315897578,
						"rouge1_acc,none": 0.40024479804161567,
						"rouge1_acc_stderr,none": 0.017151605555749138,
						"rouge1_diff,none": -0.9448992510290869,
						"rouge1_diff_stderr,none": 0.9636045615702692,
						"rouge1_max,none": 44.337791988943025,
						"rouge1_max_stderr,none": 0.8814919841670666,
						"rouge2_acc,none": 0.3243574051407589,
						"rouge2_acc_stderr,none": 0.016387976779647942,
						"rouge2_diff,none": -1.8854947271101368,
						"rouge2_diff_stderr,none": 1.0600977382210646,
						"rouge2_max,none": 28.245646644324392,
						"rouge2_max_stderr,none": 0.980559173893425,
						"rougeL_acc,none": 0.3880048959608323,
						"rougeL_acc_stderr,none": 0.017058761501347972,
						"rougeL_diff,none": -1.2238854600226043,
						"rougeL_diff_stderr,none": 0.9645694209096849,
						"rougeL_max,none": 41.264123969153935,
						"rougeL_max_stderr,none": 0.8830439719156922
					},
					"truthfulqa_mc1": {
						"acc,none": 0.28886168910648713,
						"acc_stderr,none": 0.01586634640138431,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.4410263894952997,
						"acc_stderr,none": 0.014842963050056827,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.03395669291338583,
						"exact_match_stderr,none": 0.004018889171427487
					},
					"wic": {
						"acc,none": 0.5015673981191222,
						"acc_stderr,none": 0.019810623954060382,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7216991036274446,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6491231112090743,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 14.512763264192767,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6661404893449092,
						"acc_stderr,none": 0.013254029695143343,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6442307692307693,
						"acc_stderr,none": 0.04717221961050337,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8315018315018315,
						"acc_stderr,none": 0.02269576078832295,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5367272727272727,
						"acc_stderr,none": 0.05343023789696368,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.022365160424231326,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.668,
						"acc_stderr,none": 0.021081766571222852,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.022383074051792257,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.528,
						"acc_stderr,none": 0.022347949832668093,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.56,
						"acc_stderr,none": 0.022221331534143032,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.504,
						"acc_stderr,none": 0.022382357781962143,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.022318338119870527,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.022382357781962143,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.021728881438701705,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3804016064257028,
						"acc_stderr,none": 0.05038205588716881,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3281124497991968,
						"acc_stderr,none": 0.009411247685593385,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337342,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4598393574297189,
						"acc_stderr,none": 0.009989691810169666,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3309236947791165,
						"acc_stderr,none": 0.009431685461463305,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.521285140562249,
						"acc_stderr,none": 0.01001298760450043,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.4542168674698795,
						"acc_stderr,none": 0.00997996999168044,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.463855421686747,
						"acc_stderr,none": 0.009995852282822374,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3506024096385542,
						"acc_stderr,none": 0.009564237156206107,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.36586345381526103,
						"acc_stderr,none": 0.009654692765572584,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3598393574297189,
						"acc_stderr,none": 0.009620250217765983,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3562248995983936,
						"acc_stderr,none": 0.009598796305792174,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667055,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3674698795180723,
						"acc_stderr,none": 0.00966360190372803,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3469879518072289,
						"acc_stderr,none": 0.009541251561568398,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3313253012048193,
						"acc_stderr,none": 0.009434574056101956,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5343842127429156,
						"acc_stderr,none": 0.08790474490100891,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47054930509596293,
						"acc_stderr,none": 0.012844785490017002,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7763070814030444,
						"acc_stderr,none": 0.010723941055690175,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6598279285241562,
						"acc_stderr,none": 0.012192034998028832,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.012866108021218212,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.47253474520185307,
						"acc_stderr,none": 0.012847698270388223,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.4937127729980146,
						"acc_stderr,none": 0.01286610802121821,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4559894109861019,
						"acc_stderr,none": 0.012817182901076042,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4784910655195235,
						"acc_stderr,none": 0.012855214257296603,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.49106551952349436,
						"acc_stderr,none": 0.0128650709173208,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.6029119788219722,
						"acc_stderr,none": 0.012591627740247465,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7370195549561699,
						"acc_stderr,none": 0.08875586095747369,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8572043010752688,
						"acc_stderr,none": 0.007257410222542498,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6867469879518072,
						"acc_stderr,none": 0.05121994210658146,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5516162669447341,
						"acc_stderr,none": 0.016067958526765066,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6387832699619772,
						"acc_stderr,none": 0.02967632026804158,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5301587301587302,
						"acc_stderr,none": 0.028165256808123703,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.7242063492063492,
						"acc_stderr,none": 0.019926879903661533,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "tiiuae/falcon-7b-instruct"
	},
	"tiiuae/falcon-rw-1b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5177564825253663,
						"acc_norm,none": 0.487880496054115,
						"acc_norm_stderr,none": 0.0415451267779369,
						"acc_stderr,none": 0.05436675799088168,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3221875,
						"acc_stderr,none": 0.016175849032190083,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.00485,
						"acc_stderr,none": 0.004763700905079409,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8250746268656717,
						"acc_stderr,none": 0.1615006598383841,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.24294205052005943,
						"acc_norm,none": 0.24294205052005943,
						"acc_norm_stderr,none": 0.11550023999885095,
						"acc_stderr,none": 0.11550023999885095,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25280607839751357,
						"acc_norm,none": 0.25280607839751357,
						"acc_norm_stderr,none": 0.035910200262794206,
						"acc_stderr,none": 0.035910200262794206,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.651787045319022,
						"likelihood_diff_stderr,none": 0.5001289650727238,
						"pct_stereotype,none": 0.5178890876565295,
						"pct_stereotype_stderr,none": 0.09552933693924155
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.038385826771653545,
						"exact_match_stderr,none": 0.004263152964967611
					},
					"glue": {
						"acc,none": 0.4686464223210568,
						"acc_stderr,none": 0.05654791396954022,
						"alias": "glue",
						"f1,none": 0.3409102870116671,
						"f1_stderr,none": 0.0018230856522465648,
						"mcc,none": -0.11838753687089222,
						"mcc_stderr,none": 0.000786660324547111
					},
					"kmmlu": {
						"acc,none": 0.09962460294542304,
						"acc_norm,none": 0.09962460294542304,
						"acc_norm_stderr,none": 0.06323865722618321,
						"acc_stderr,none": 0.06323865722618321,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4733611050208288,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.0004959919839679348,
						"acc_stderr,none": 0.04266767895177174,
						"alias": "kobest",
						"f1,none": 0.36279047110467133,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5221230351251698,
						"acc_stderr,none": 0.01487175982410965,
						"alias": "lambada",
						"perplexity,none": 8.747892506290377,
						"perplexity_stderr,none": 0.7285467462700776
					},
					"lambada_cloze": {
						"acc,none": 0.020473510576363284,
						"acc_stderr,none": 0.004052993270403501,
						"alias": "lambada_cloze",
						"perplexity,none": 511.6365866558847,
						"perplexity_stderr,none": 97.5105304587382
					},
					"lambada_multilingual": {
						"acc,none": 0.2400543372792548,
						"acc_stderr,none": 0.09088143462697339,
						"alias": "lambada_multilingual",
						"perplexity,none": 662.8674557562039,
						"perplexity_stderr,none": 243.04118466571217
					},
					"mmlu": {
						"acc,none": 0.2486113089303518,
						"acc_stderr,none": 0.040856044284379195,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.24675876726886292,
						"acc_stderr,none": 0.031094799064549938,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2658513035082073,
						"acc_stderr,none": 0.04392059667120125,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23691907702307444,
						"acc_stderr,none": 0.040415046664276935,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2457976530288614,
						"acc_stderr,none": 0.04775804508575561,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.31540099361249113,
						"acc_norm,none": 0.29453016370441626,
						"acc_norm_stderr,none": 0.000121755179501889,
						"acc_stderr,none": 0.08101958172475976,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.5199999999999999,
						"acc_stderr,none": 0.030346024059203204,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7047725619336289,
						"acc_norm,none": 0.49567593073624916,
						"acc_norm_stderr,none": 0.004671941763959614,
						"acc_stderr,none": 0.15851846314770499,
						"alias": "pythia",
						"bits_per_byte,none": 0.7637309010984497,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978757668324775,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 7.3760781814578875,
						"perplexity_stderr,none": 0.18775687331294497,
						"word_perplexity,none": 16.959400823743298,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3971631205673759,
						"acc_norm_stderr,none": 0.050466078388769486,
						"acc_stderr,none": 0.04103293115145991,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5268044324648098,
						"acc_stderr,none": 0.04061405929928613,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.31329620142995696,
						"acc_stderr,none": 0.04710751277400382,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.0002485279995361041,
						"bleu_diff,none": -8.823006485159095,
						"bleu_diff_stderr,none": 0.5504856625018505,
						"bleu_max,none": 22.42911157053935,
						"bleu_max_stderr,none": 0.5141825023827793,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.00024182670675721563,
						"rouge1_diff,none": -10.907633820974988,
						"rouge1_diff_stderr,none": 0.668917888327363,
						"rouge1_max,none": 47.29833255797588,
						"rouge1_max_stderr,none": 0.7273489736793618,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.00018557991070955286,
						"rouge2_diff,none": -13.584114530496528,
						"rouge2_diff_stderr,none": 0.8918655242742985,
						"rouge2_max,none": 30.231307567989813,
						"rouge2_max_stderr,none": 0.9151376823291036,
						"rougeL_acc,none": 0.24479804161566707,
						"rougeL_acc_stderr,none": 0.00022655877504510204,
						"rougeL_diff,none": -11.400076809718822,
						"rougeL_diff_stderr,none": 0.6788849097243875,
						"rougeL_max,none": 44.417870376655785,
						"rougeL_max_stderr,none": 0.7300331328268088
					},
					"xcopa": {
						"acc,none": 0.5225454545454544,
						"acc_stderr,none": 0.029445850681849933,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3538688085676037,
						"acc_stderr,none": 0.040343837101509725,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5094759641417483,
						"acc_stderr,none": 0.056903140792870545,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.6745336030568667,
						"acc_stderr,none": 0.09162336475916147,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5177564825253663,
						"acc_norm,none": 0.487880496054115,
						"acc_norm_stderr,none": 0.0415451267779369,
						"acc_stderr,none": 0.05436675799088168,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3221875,
						"acc_stderr,none": 0.016175849032190083,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.305,
						"acc_stderr,none": 0.014566646394664396,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.014758652303574886,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3383333333333333,
						"acc_stderr,none": 0.013664144006618275,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.2883959044368601,
						"acc_norm,none": 0.31569965870307165,
						"acc_norm_stderr,none": 0.013582571095815291,
						"acc_stderr,none": 0.013238394422428171,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6308922558922558,
						"acc_norm,none": 0.5728114478114478,
						"acc_norm_stderr,none": 0.010150415974210868,
						"acc_stderr,none": 0.00990198741024273,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.00485,
						"acc_stderr,none": 0.004763700905079409,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315683,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.0075,
						"acc_stderr,none": 0.0019296986470519835,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.022,
						"acc_stderr,none": 0.0032807593162018913,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.0125,
						"acc_stderr,none": 0.00248494717876267,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.001,
						"acc_stderr,none": 0.0007069298939339458,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521539,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005206073752711497,
						"acc_stderr,none": 0.0014992721829171724,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8250746268656717,
						"acc_stderr,none": 0.1615006598383841,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.008583336977753653,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329837,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045057,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319317,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491123,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.742,
						"acc_stderr,none": 0.013842963108656604,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.015740004693383856,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438687,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.01060525678479657,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298215,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298484,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.0061998740663370576,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.966,
						"acc_stderr,none": 0.005733836139695462,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571409,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.922,
						"acc_stderr,none": 0.00848457353011858,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.007855297938697594,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.0052195060344100465,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695801,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.775,
						"acc_stderr,none": 0.013211720158614755,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.695,
						"acc_stderr,none": 0.0145666463946644,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.837,
						"acc_stderr,none": 0.011686212712746835,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.00858333697775365,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936718,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.987,
						"acc_stderr,none": 0.00358383088940363,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.015577986829936531,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319324,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.635,
						"acc_stderr,none": 0.0152317762262649,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888945,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329798,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651525,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.931,
						"acc_stderr,none": 0.00801893405031515,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.651,
						"acc_stderr,none": 0.015080663991563102,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.871,
						"acc_stderr,none": 0.010605256784796582,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.377,
						"acc_stderr,none": 0.015333170125779857,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.649,
						"acc_stderr,none": 0.015100563798316405,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936533,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286427,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.68,
						"acc_stderr,none": 0.014758652303574888,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592072,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.916,
						"acc_stderr,none": 0.008776162089491115,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.01331455133593595,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474915,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.0014135055705578159,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528019,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.012436787112179486,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.015766025737882158,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571401,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.913,
						"acc_stderr,none": 0.008916866630745895,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.00223158687484488,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.764,
						"acc_stderr,none": 0.013434451402438714,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.415,
						"acc_stderr,none": 0.015589035185604627,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.847,
						"acc_stderr,none": 0.011389500459665537,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792925,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.601,
						"acc_stderr,none": 0.015493193313162908,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397234,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696879,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.733,
						"acc_stderr,none": 0.013996674851796271,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271287,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295446,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333378,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565534,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.975,
						"acc_stderr,none": 0.004939574819698462,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.403,
						"acc_stderr,none": 0.015518757419066533,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.313,
						"acc_stderr,none": 0.014671272822977885,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6168195718654435,
						"acc_stderr,none": 0.008503021391450783,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.42857142857142855,
						"acc_stderr,none": 0.06672848092813058,
						"alias": "cb",
						"f1,none": 0.21956970232832304,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.24294205052005943,
						"acc_norm,none": 0.24294205052005943,
						"acc_norm_stderr,none": 0.11550023999885095,
						"acc_stderr,none": 0.11550023999885095,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.48484848484848486,
						"acc_norm,none": 0.48484848484848486,
						"acc_norm_stderr,none": 0.08834775598250456,
						"acc_stderr,none": 0.08834775598250456,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.23404255319148937,
						"acc_norm,none": 0.23404255319148937,
						"acc_norm_stderr,none": 0.062426763436828805,
						"acc_stderr,none": 0.062426763436828805,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.07770873402002615,
						"acc_stderr,none": 0.07770873402002615,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445796,
						"acc_stderr,none": 0.06180629713445796,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502246,
						"acc_stderr,none": 0.07401656182502246,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.375,
						"acc_norm,none": 0.375,
						"acc_norm_stderr,none": 0.125,
						"acc_stderr,none": 0.125,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.2413793103448276,
						"acc_norm,none": 0.2413793103448276,
						"acc_norm_stderr,none": 0.080869237238335,
						"acc_stderr,none": 0.080869237238335,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.06861056852129647,
						"acc_stderr,none": 0.06861056852129647,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3870967741935484,
						"acc_norm,none": 0.3870967741935484,
						"acc_norm_stderr,none": 0.08892934678767887,
						"acc_stderr,none": 0.08892934678767887,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522558,
						"acc_stderr,none": 0.11369720523522558,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755131,
						"acc_stderr,none": 0.08780518530755131,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.15,
						"acc_norm,none": 0.15,
						"acc_norm_stderr,none": 0.0819178021909125,
						"acc_stderr,none": 0.0819178021909125,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141223,
						"acc_stderr,none": 0.06372446937141223,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271772,
						"acc_stderr,none": 0.10163945352271772,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.1111111111111111,
						"acc_norm,none": 0.1111111111111111,
						"acc_norm_stderr,none": 0.07622159339667062,
						"acc_stderr,none": 0.07622159339667062,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.05589005688828226,
						"acc_stderr,none": 0.05589005688828226,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.06148754619013454,
						"acc_stderr,none": 0.06148754619013454,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764436,
						"acc_stderr,none": 0.09361833424764436,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25280607839751357,
						"acc_norm,none": 0.25280607839751357,
						"acc_norm_stderr,none": 0.035910200262794206,
						"acc_stderr,none": 0.035910200262794206,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.2635135135135135,
						"acc_norm,none": 0.2635135135135135,
						"acc_norm_stderr,none": 0.036335000433819875,
						"acc_stderr,none": 0.036335000433819875,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.03489370652018759,
						"acc_stderr,none": 0.03489370652018759,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.24880382775119617,
						"acc_norm,none": 0.24880382775119617,
						"acc_norm_stderr,none": 0.029975990636702532,
						"acc_stderr,none": 0.029975990636702532,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.034049163262375844,
						"acc_stderr,none": 0.034049163262375844,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.24427480916030533,
						"acc_norm,none": 0.24427480916030533,
						"acc_norm_stderr,none": 0.037683359597287414,
						"acc_stderr,none": 0.037683359597287414,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743284,
						"acc_stderr,none": 0.024155705949743284,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.030587591351604257,
						"acc_stderr,none": 0.030587591351604257,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.028458820991460295,
						"acc_stderr,none": 0.028458820991460295,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337099,
						"acc_stderr,none": 0.04084247315337099,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.3177570093457944,
						"acc_norm,none": 0.3177570093457944,
						"acc_norm_stderr,none": 0.045223500773820306,
						"acc_stderr,none": 0.045223500773820306,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.3113207547169811,
						"acc_norm,none": 0.3113207547169811,
						"acc_norm_stderr,none": 0.0451874553177075,
						"acc_stderr,none": 0.0451874553177075,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.040191074725573483,
						"acc_stderr,none": 0.040191074725573483,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2454212454212454,
						"acc_norm,none": 0.2454212454212454,
						"acc_norm_stderr,none": 0.026092993884228654,
						"acc_stderr,none": 0.026092993884228654,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.24509803921568626,
						"acc_norm,none": 0.24509803921568626,
						"acc_norm_stderr,none": 0.030190282453501954,
						"acc_stderr,none": 0.030190282453501954,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2585034013605442,
						"acc_norm,none": 0.2585034013605442,
						"acc_norm_stderr,none": 0.03623358323071023,
						"acc_stderr,none": 0.03623358323071023,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.03659146222520568,
						"acc_stderr,none": 0.03659146222520568,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.24528301886792453,
						"acc_norm,none": 0.24528301886792453,
						"acc_norm_stderr,none": 0.034229240176444506,
						"acc_stderr,none": 0.034229240176444506,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.03408997886857529,
						"acc_stderr,none": 0.03408997886857529,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.22093023255813954,
						"acc_norm,none": 0.22093023255813954,
						"acc_norm_stderr,none": 0.03172617353438933,
						"acc_stderr,none": 0.03172617353438933,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.26262626262626265,
						"acc_norm,none": 0.26262626262626265,
						"acc_norm_stderr,none": 0.031353050095330855,
						"acc_stderr,none": 0.031353050095330855,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2647058823529412,
						"acc_norm,none": 0.2647058823529412,
						"acc_norm_stderr,none": 0.028657491285071963,
						"acc_stderr,none": 0.028657491285071963,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2826086956521739,
						"acc_norm,none": 0.2826086956521739,
						"acc_norm_stderr,none": 0.02975452853823325,
						"acc_stderr,none": 0.02975452853823325,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.037857144650666544,
						"acc_stderr,none": 0.037857144650666544,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23295454545454544,
						"acc_norm,none": 0.23295454545454544,
						"acc_norm_stderr,none": 0.031954139030501774,
						"acc_stderr,none": 0.031954139030501774,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2348993288590604,
						"acc_norm,none": 0.2348993288590604,
						"acc_norm_stderr,none": 0.034847315046501876,
						"acc_stderr,none": 0.034847315046501876,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.25443786982248523,
						"acc_norm,none": 0.25443786982248523,
						"acc_norm_stderr,none": 0.03360300796331527,
						"acc_stderr,none": 0.03360300796331527,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.038206998148497956,
						"acc_stderr,none": 0.038206998148497956,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2804878048780488,
						"acc_norm,none": 0.2804878048780488,
						"acc_norm_stderr,none": 0.03518700228801578,
						"acc_stderr,none": 0.03518700228801578,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.04265792110940589,
						"acc_stderr,none": 0.04265792110940589,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2517482517482518,
						"acc_norm,none": 0.2517482517482518,
						"acc_norm_stderr,none": 0.03642192783741706,
						"acc_stderr,none": 0.03642192783741706,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604674,
						"acc_stderr,none": 0.03893259610604674,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.0316293039569795,
						"acc_stderr,none": 0.0316293039569795,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.029761395837435988,
						"acc_stderr,none": 0.029761395837435988,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.23577235772357724,
						"acc_norm,none": 0.23577235772357724,
						"acc_norm_stderr,none": 0.03843066495214839,
						"acc_stderr,none": 0.03843066495214839,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2786885245901639,
						"acc_norm,none": 0.2786885245901639,
						"acc_norm_stderr,none": 0.040759446590692514,
						"acc_stderr,none": 0.040759446590692514,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.02985642316467189,
						"acc_stderr,none": 0.02985642316467189,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871163,
						"acc_stderr,none": 0.03152480234871163,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.22758620689655173,
						"acc_norm,none": 0.22758620689655173,
						"acc_norm_stderr,none": 0.03493950380131184,
						"acc_stderr,none": 0.03493950380131184,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2571428571428571,
						"acc_norm,none": 0.2571428571428571,
						"acc_norm_stderr,none": 0.04285714285714284,
						"acc_stderr,none": 0.04285714285714284,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098225,
						"acc_stderr,none": 0.032888897342098225,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.24644549763033174,
						"acc_norm,none": 0.24644549763033174,
						"acc_norm_stderr,none": 0.02973775172659683,
						"acc_stderr,none": 0.02973775172659683,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.24202127659574468,
						"acc_norm,none": 0.24202127659574468,
						"acc_norm_stderr,none": 0.022117683921586983,
						"acc_stderr,none": 0.022117683921586983,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171142,
						"acc_stderr,none": 0.028324514684171142,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25287356321839083,
						"acc_norm,none": 0.25287356321839083,
						"acc_norm_stderr,none": 0.0330465186437516,
						"acc_stderr,none": 0.0330465186437516,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.2518518518518518,
						"acc_norm,none": 0.2518518518518518,
						"acc_norm_stderr,none": 0.03749850709174021,
						"acc_stderr,none": 0.03749850709174021,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.25663716814159293,
						"acc_norm,none": 0.25663716814159293,
						"acc_norm_stderr,none": 0.02911849599823729,
						"acc_stderr,none": 0.02911849599823729,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03346409881055953,
						"acc_stderr,none": 0.03346409881055953,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617759,
						"acc_stderr,none": 0.03231470996617759,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323176,
						"acc_stderr,none": 0.03307162750323176,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2422360248447205,
						"acc_norm,none": 0.2422360248447205,
						"acc_norm_stderr,none": 0.03387086996153082,
						"acc_stderr,none": 0.03387086996153082,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.11838753687089222,
						"mcc_stderr,none": 0.028047465563703095
					},
					"copa": {
						"acc,none": 0.73,
						"acc_stderr,none": 0.04461960433384741,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.651787045319022,
						"likelihood_diff_stderr,none": 0.5001289650727238,
						"pct_stereotype,none": 0.5178890876565295,
						"pct_stereotype_stderr,none": 0.09552933693924155
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.4083929636255217,
						"likelihood_diff_stderr,none": 0.08824559596862774,
						"pct_stereotype,none": 0.6118067978533095,
						"pct_stereotype_stderr,none": 0.011904032527924659
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.9945054945054945,
						"likelihood_diff_stderr,none": 0.3683678489367818,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105199
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.409090909090909,
						"likelihood_diff_stderr,none": 1.5628036894955253,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.042307692307692,
						"likelihood_diff_stderr,none": 0.6564118214123973,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.05769230769230768
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.57734375,
						"likelihood_diff_stderr,none": 0.1783591341166465,
						"pct_stereotype,none": 0.634375,
						"pct_stereotype_stderr,none": 0.026964702306061943
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.3315972222222223,
						"likelihood_diff_stderr,none": 0.22893675213097084,
						"pct_stereotype,none": 0.5648148148148148,
						"pct_stereotype_stderr,none": 0.033812000056435254
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.2934027777777777,
						"likelihood_diff_stderr,none": 0.3274230253109938,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.21505905511811,
						"likelihood_diff_stderr,none": 0.14929251154662623,
						"pct_stereotype,none": 0.5039370078740157,
						"pct_stereotype_stderr,none": 0.02220509119300217
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.4583333333333335,
						"likelihood_diff_stderr,none": 0.38624397947982525,
						"pct_stereotype,none": 0.6936936936936937,
						"pct_stereotype_stderr,none": 0.04395066997351522
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.073924731182796,
						"likelihood_diff_stderr,none": 0.4649747127089346,
						"pct_stereotype,none": 0.8494623655913979,
						"pct_stereotype_stderr,none": 0.03728212869390004
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 3.835526315789474,
						"likelihood_diff_stderr,none": 0.2496978930611475,
						"pct_stereotype,none": 0.631578947368421,
						"pct_stereotype_stderr,none": 0.03508771929824559
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.8953115682766843,
						"likelihood_diff_stderr,none": 0.09407552232402548,
						"pct_stereotype,none": 0.4251639833035182,
						"pct_stereotype_stderr,none": 0.01207572493969688
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.6666666666666665,
						"likelihood_diff_stderr,none": 0.3268804238336138,
						"pct_stereotype,none": 0.4,
						"pct_stereotype_stderr,none": 0.05192907868894985
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 3.9038461538461537,
						"likelihood_diff_stderr,none": 1.3221299532996857,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.363636363636363,
						"likelihood_diff_stderr,none": 0.5937019350728617,
						"pct_stereotype,none": 0.5151515151515151,
						"pct_stereotype_stderr,none": 0.06198888629778894
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.296728971962617,
						"likelihood_diff_stderr,none": 0.19018518770216078,
						"pct_stereotype,none": 0.48909657320872274,
						"pct_stereotype_stderr,none": 0.027944203070818633
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.645256916996048,
						"likelihood_diff_stderr,none": 0.23720634542312885,
						"pct_stereotype,none": 0.2924901185770751,
						"pct_stereotype_stderr,none": 0.02865639690849427
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 4.725694444444445,
						"likelihood_diff_stderr,none": 0.5741258520482906,
						"pct_stereotype,none": 0.4305555555555556,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.523913043478261,
						"likelihood_diff_stderr,none": 0.16602229736720006,
						"pct_stereotype,none": 0.3239130434782609,
						"pct_stereotype_stderr,none": 0.02184284250053262
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.984782608695652,
						"likelihood_diff_stderr,none": 0.40476856959565294,
						"pct_stereotype,none": 0.6173913043478261,
						"pct_stereotype_stderr,none": 0.04552031372871532
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.3214285714285716,
						"likelihood_diff_stderr,none": 0.3001903576541668,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.052548465466459485
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.264668367346939,
						"likelihood_diff_stderr,none": 0.32566292946468706,
						"pct_stereotype,none": 0.5204081632653061,
						"pct_stereotype_stderr,none": 0.03577590557703757
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.038385826771653545,
						"exact_match_stderr,none": 0.004263152964967611
					},
					"glue": {
						"acc,none": 0.4686464223210568,
						"acc_stderr,none": 0.05654791396954022,
						"alias": "glue",
						"f1,none": 0.3409102870116671,
						"f1_stderr,none": 0.0018230856522465648,
						"mcc,none": -0.11838753687089222,
						"mcc_stderr,none": 0.000786660324547111
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.008339651250947688,
						"exact_match_stderr,get-answer": 0.002504942226860512
					},
					"hellaswag": {
						"acc,none": 0.46683927504481176,
						"acc_norm,none": 0.6166102370045807,
						"acc_norm_stderr,none": 0.004852182621274259,
						"acc_stderr,none": 0.004978795454216714,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.09962460294542304,
						"acc_norm,none": 0.09962460294542304,
						"acc_norm_stderr,none": 0.06323865722618321,
						"acc_stderr,none": 0.06323865722618321,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.03942772444036623,
						"acc_stderr,none": 0.03942772444036623,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.09,
						"acc_norm,none": 0.09,
						"acc_norm_stderr,none": 0.009054390204866439,
						"acc_stderr,none": 0.009054390204866439,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.074,
						"acc_norm,none": 0.074,
						"acc_norm_stderr,none": 0.008282064512704159,
						"acc_stderr,none": 0.008282064512704159,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.193,
						"acc_norm,none": 0.193,
						"acc_norm_stderr,none": 0.012486268734370098,
						"acc_stderr,none": 0.012486268734370098,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.01251081614126438,
						"acc_stderr,none": 0.01251081614126438,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.01552503498177411,
						"acc_stderr,none": 0.01552503498177411,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.01,
						"acc_norm,none": 0.01,
						"acc_norm_stderr,none": 0.0031480009386767754,
						"acc_stderr,none": 0.0031480009386767754,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.018,
						"acc_norm,none": 0.018,
						"acc_norm_stderr,none": 0.004206387249611464,
						"acc_stderr,none": 0.004206387249611464,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.021,
						"acc_norm,none": 0.021,
						"acc_norm_stderr,none": 0.004536472151306499,
						"acc_stderr,none": 0.004536472151306499,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.215,
						"acc_norm,none": 0.215,
						"acc_norm_stderr,none": 0.02912242397001744,
						"acc_stderr,none": 0.02912242397001744,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.035,
						"acc_norm,none": 0.035,
						"acc_norm_stderr,none": 0.005814534272734977,
						"acc_stderr,none": 0.005814534272734977,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.019,
						"acc_norm,none": 0.019,
						"acc_norm_stderr,none": 0.004319451082910613,
						"acc_stderr,none": 0.004319451082910613,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.024,
						"acc_norm,none": 0.024,
						"acc_norm_stderr,none": 0.004842256441727063,
						"acc_stderr,none": 0.004842256441727063,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.012155153135511949,
						"acc_stderr,none": 0.012155153135511949,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.005651808820452375,
						"acc_stderr,none": 0.005651808820452375,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.134,
						"acc_norm,none": 0.134,
						"acc_norm_stderr,none": 0.010777762298369676,
						"acc_stderr,none": 0.010777762298369676,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.117,
						"acc_norm,none": 0.117,
						"acc_norm_stderr,none": 0.010169287802713329,
						"acc_stderr,none": 0.010169287802713329,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333454,
						"acc_stderr,none": 0.008534156773333454,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.041633319989322695,
						"acc_stderr,none": 0.041633319989322695,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.025,
						"acc_norm,none": 0.025,
						"acc_norm_stderr,none": 0.004939574819698469,
						"acc_stderr,none": 0.004939574819698469,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.033,
						"acc_norm,none": 0.033,
						"acc_norm_stderr,none": 0.00565180882045237,
						"acc_stderr,none": 0.00565180882045237,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.007395315455792944,
						"acc_stderr,none": 0.007395315455792944,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613604,
						"acc_stderr,none": 0.013294199326613604,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.074,
						"acc_norm,none": 0.074,
						"acc_norm_stderr,none": 0.008282064512704163,
						"acc_stderr,none": 0.008282064512704163,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662179,
						"acc_stderr,none": 0.012207580637662179,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.13666666666666666,
						"acc_norm,none": 0.13666666666666666,
						"acc_norm_stderr,none": 0.014034829611310275,
						"acc_stderr,none": 0.014034829611310275,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.104,
						"acc_norm,none": 0.104,
						"acc_norm_stderr,none": 0.009658016218524289,
						"acc_stderr,none": 0.009658016218524289,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.009820001651345707,
						"acc_stderr,none": 0.009820001651345707,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.058,
						"acc_norm,none": 0.058,
						"acc_norm_stderr,none": 0.007395315455792951,
						"acc_stderr,none": 0.007395315455792951,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333449,
						"acc_stderr,none": 0.008534156773333449,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.26,
						"acc_norm,none": 0.26,
						"acc_norm_stderr,none": 0.0440844002276808,
						"acc_stderr,none": 0.0440844002276808,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02395648228514077,
						"acc_stderr,none": 0.02395648228514077,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.01345407046257794,
						"acc_stderr,none": 0.01345407046257794,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.041,
						"acc_norm,none": 0.041,
						"acc_norm_stderr,none": 0.006273624021118784,
						"acc_stderr,none": 0.006273624021118784,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.113,
						"acc_norm,none": 0.113,
						"acc_norm_stderr,none": 0.010016552866696839,
						"acc_stderr,none": 0.010016552866696839,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.02752568467055655,
						"acc_stderr,none": 0.02752568467055655,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.137,
						"acc_norm,none": 0.137,
						"acc_norm_stderr,none": 0.0108788487143333,
						"acc_stderr,none": 0.0108788487143333,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475279,
						"acc_stderr,none": 0.011358918303475279,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496855,
						"acc_stderr,none": 0.027234326551496855,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.029,
						"acc_norm,none": 0.029,
						"acc_norm_stderr,none": 0.005309160685756992,
						"acc_stderr,none": 0.005309160685756992,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4733611050208288,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.0004959919839679348,
						"acc_stderr,none": 0.04266767895177174,
						"alias": "kobest",
						"f1,none": 0.36279047110467133,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.01580663942303517,
						"alias": " - kobest_copa",
						"f1,none": 0.479248034161329,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.324,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.022270877485360444,
						"acc_stderr,none": 0.02095055731247745,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3223257693644722,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.49622166246851385,
						"acc_stderr,none": 0.025125227983562776,
						"alias": " - kobest_sentineg",
						"f1,none": 0.33164983164983164,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5221230351251698,
						"acc_stderr,none": 0.01487175982410965,
						"alias": "lambada",
						"perplexity,none": 8.747892506290377,
						"perplexity_stderr,none": 0.7285467462700776
					},
					"lambada_cloze": {
						"acc,none": 0.020473510576363284,
						"acc_stderr,none": 0.004052993270403501,
						"alias": "lambada_cloze",
						"perplexity,none": 511.6365866558847,
						"perplexity_stderr,none": 97.5105304587382
					},
					"lambada_multilingual": {
						"acc,none": 0.2400543372792548,
						"acc_stderr,none": 0.09088143462697339,
						"alias": "lambada_multilingual",
						"perplexity,none": 662.8674557562039,
						"perplexity_stderr,none": 243.04118466571217
					},
					"lambada_openai": {
						"acc,none": 0.549776829031632,
						"acc_stderr,none": 0.00693137203883537,
						"alias": " - lambada_openai",
						"perplexity,none": 7.3760781814578875,
						"perplexity_stderr,none": 0.18775687331294497
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.027556763050650107,
						"acc_stderr,none": 0.002280648227904733,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 319.5293898190276,
						"perplexity_stderr,none": 10.189615414835425
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.13623132156025616,
						"acc_stderr,none": 0.004779134835110424,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 961.1542225709702,
						"perplexity_stderr,none": 60.327400537860576
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5491946438967592,
						"acc_stderr,none": 0.0069321787551013935,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 7.372908241567056,
						"perplexity_stderr,none": 0.18740628120738217
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.15854841839705025,
						"acc_stderr,none": 0.005088708755423652,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 933.2560650502841,
						"perplexity_stderr,none": 57.51170785385776
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.20337667378226276,
						"acc_stderr,none": 0.005607756564321304,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 414.7305797007161,
						"perplexity_stderr,none": 24.289237009463402
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.15292062875994566,
						"acc_stderr,none": 0.005014263623452837,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 997.8235032174816,
						"perplexity_stderr,none": 64.29920564246147
					},
					"lambada_standard": {
						"acc,none": 0.49582767320007765,
						"acc_stderr,none": 0.006965735121159854,
						"alias": " - lambada_standard",
						"perplexity,none": 10.12685379832158,
						"perplexity_stderr,none": 0.27500102004120697
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.01339025810207646,
						"acc_stderr,none": 0.0016013241251574709,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 703.7437834927417,
						"perplexity_stderr,none": 21.411567438249488
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.26017811704834604,
						"exact_match_stderr,get-answer": 0.011069063455907864
					},
					"logiqa": {
						"acc,none": 0.2073732718894009,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.017512971782225207,
						"acc_stderr,none": 0.01590208491387634,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2340966921119593,
						"acc_norm,none": 0.28498727735368956,
						"acc_norm_stderr,none": 0.011388893410930615,
						"acc_stderr,none": 0.010683080933862762,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.23517587939698492,
						"acc_norm,none": 0.23919597989949748,
						"acc_norm_stderr,none": 0.007809332748857671,
						"acc_stderr,none": 0.0077638612776946255,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3567040881169244,
						"acc_stderr,none": 0.004930039969836445,
						"alias": "mc_taco",
						"f1,none": 0.5030273277695958,
						"f1_stderr,none": 0.005539965906996835
					},
					"medmcqa": {
						"acc,none": 0.3067176667463543,
						"acc_norm,none": 0.3067176667463543,
						"acc_norm_stderr,none": 0.007130704645763214,
						"acc_stderr,none": 0.007130704645763214,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2717989002356638,
						"acc_norm,none": 0.2717989002356638,
						"acc_norm_stderr,none": 0.012474006245515923,
						"acc_stderr,none": 0.012474006245515923,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2486113089303518,
						"acc_stderr,none": 0.040856044284379195,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.28,
						"acc_stderr,none": 0.045126085985421276,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.23703703703703705,
						"acc_stderr,none": 0.03673731683969506,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.21052631578947367,
						"acc_stderr,none": 0.03317672787533158,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.044619604333847394,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.22264150943396227,
						"acc_stderr,none": 0.0256042334708991,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.18,
						"acc_stderr,none": 0.03861229196653694,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932268,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.27167630057803466,
						"acc_stderr,none": 0.03391750322321659,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.04023382273617746,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.32340425531914896,
						"acc_stderr,none": 0.030579442773610334,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.22807017543859648,
						"acc_stderr,none": 0.03947152782669415,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2482758620689655,
						"acc_stderr,none": 0.03600105692727774,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.022019080012217904,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.03670066451047181,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.39,
						"acc_stderr,none": 0.04902071300001974,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2064516129032258,
						"acc_stderr,none": 0.02302589961718872,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.26108374384236455,
						"acc_stderr,none": 0.030903796952114475,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.23636363636363636,
						"acc_stderr,none": 0.03317505930009179,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.20202020202020202,
						"acc_stderr,none": 0.028606204289229876,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.24870466321243523,
						"acc_stderr,none": 0.031195840877700286,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.020280805062535726,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.26296296296296295,
						"acc_stderr,none": 0.02684205787383371,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.23109243697478993,
						"acc_stderr,none": 0.027381406927868966,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.21834862385321102,
						"acc_stderr,none": 0.017712600528722734,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_stderr,none": 0.025416428388767478,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.028379449451588667,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.27848101265822783,
						"acc_stderr,none": 0.029178682304842565,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2914798206278027,
						"acc_stderr,none": 0.030500283176545916,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.3282442748091603,
						"acc_stderr,none": 0.04118438565806298,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.24675876726886292,
						"acc_stderr,none": 0.031094799064549938,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2962962962962963,
						"acc_stderr,none": 0.04414343666854933,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.25153374233128833,
						"acc_stderr,none": 0.034089978868575295,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.0432704093257873,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.1650485436893204,
						"acc_stderr,none": 0.036756688322331886,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.24786324786324787,
						"acc_stderr,none": 0.028286324075564393,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.04688261722621505,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.26947637292464877,
						"acc_stderr,none": 0.015866243073215044,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26878612716763006,
						"acc_stderr,none": 0.023868003262500118,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.24581005586592178,
						"acc_stderr,none": 0.014400296429225601,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.023805186524888167,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2658513035082073,
						"acc_stderr,none": 0.04392059667120125,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.19614147909967847,
						"acc_stderr,none": 0.022552447780478036,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.022409674547304193,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.30851063829787234,
						"acc_stderr,none": 0.02755336616510137,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2627118644067797,
						"acc_stderr,none": 0.01124054551499566,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.22058823529411764,
						"acc_stderr,none": 0.02518778666022727,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.27124183006535946,
						"acc_stderr,none": 0.017986615304030305,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.038313051408846034,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.025607375986579157,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.23691907702307444,
						"acc_stderr,none": 0.040415046664276935,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.25870646766169153,
						"acc_stderr,none": 0.030965903123573037,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2457976530288614,
						"acc_stderr,none": 0.04775804508575561,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.03680783690727581,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.2222222222222222,
						"acc_stderr,none": 0.03188578017686398,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3662761079979623,
						"acc_stderr,none": 0.0048633027820871476,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.36696501220504474,
						"acc_stderr,none": 0.004861019222910397,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6715686274509803,
						"acc_stderr,none": 0.0232793212154491,
						"alias": "mrpc",
						"f1,none": 0.8011869436201781,
						"f1_stderr,none": 0.01682560779726363
					},
					"multimedqa": {
						"acc,none": 0.31540099361249113,
						"acc_norm,none": 0.29453016370441626,
						"acc_norm_stderr,none": 0.000121755179501889,
						"acc_stderr,none": 0.08101958172475976,
						"alias": "multimedqa"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6637509423844163,
						"mrr_stderr,none": 0.010278092668816506,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.45146726862302483,
						"r@2_stderr,none": 0.016727951978179466
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6285741178836026,
						"mrr_stderr,none": 0.010450684275013165,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.4706546275395034,
						"r@2_stderr,none": 0.016778343895001428
					},
					"openbookqa": {
						"acc,none": 0.244,
						"acc_norm,none": 0.348,
						"acc_norm_stderr,none": 0.0213237286328075,
						"acc_stderr,none": 0.01922673489361458,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.011179914813969712,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.457,
						"acc_stderr,none": 0.0111417040341408,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4665,
						"acc_stderr,none": 0.01115800723977081,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.541,
						"acc_stderr,none": 0.011145474902641254,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.557,
						"acc_stderr,none": 0.0111102303580667,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456285,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5545,
						"acc_stderr,none": 0.01111650409668739,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5199999999999999,
						"acc_stderr,none": 0.030346024059203204,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7562568008705114,
						"acc_norm,none": 0.7464635473340587,
						"acc_norm_stderr,none": 0.01015009083455178,
						"acc_stderr,none": 0.010017199471500616,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.25421648163962424,
						"acc_norm,none": 0.27914175918018785,
						"acc_norm_stderr,none": 0.0032772569314958797,
						"acc_stderr,none": 0.003181129827861863,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.0216371979857224,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7047725619336289,
						"acc_norm,none": 0.49567593073624916,
						"acc_norm_stderr,none": 0.004671941763959614,
						"acc_stderr,none": 0.15851846314770499,
						"alias": "pythia",
						"bits_per_byte,none": 0.7637309010984497,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978757668324775,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 7.3760781814578875,
						"perplexity_stderr,none": 0.18775687331294497,
						"word_perplexity,none": 16.959400823743298,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3971631205673759,
						"acc_norm_stderr,none": 0.050466078388769486,
						"acc_stderr,none": 0.04103293115145991,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.38333333333333336,
						"acc_norm,none": 0.48333333333333334,
						"acc_norm_stderr,none": 0.04580945392704764,
						"acc_stderr,none": 0.04456973469931286,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.28125,
						"acc_norm,none": 0.39375,
						"acc_norm_stderr,none": 0.03874695666685831,
						"acc_stderr,none": 0.03565632932250201,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3415492957746479,
						"acc_norm,none": 0.3626760563380282,
						"acc_norm_stderr,none": 0.028578954826942813,
						"acc_stderr,none": 0.028190002383528694,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5024711696869851,
						"acc_stderr,none": 0.0067653279228825055,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5073460301756122,
						"acc_stderr,none": 0.0024864322317896667,
						"alias": "qqp",
						"f1,none": 0.33708313918658056,
						"f1_stderr,none": 0.0035162197199105323
					},
					"race": {
						"acc,none": 0.3626794258373206,
						"acc_stderr,none": 0.014879563111287502,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5703971119133574,
						"acc_stderr,none": 0.02979666882912467,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.902,
						"acc_norm,none": 0.849,
						"acc_norm_stderr,none": 0.01132816522334168,
						"acc_stderr,none": 0.009406619184621219,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5703971119133574,
						"acc_stderr,none": 0.02979666882912467,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5825688073394495,
						"acc_stderr,none": 0.01670925110210733,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5133459962011396,
						"acc_norm,none": 0.6990402879136259,
						"acc_norm_stderr,none": 0.003242918293697598,
						"acc_stderr,none": 0.0035338325124732815,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5268044324648098,
						"acc_stderr,none": 0.04061405929928613,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5985576923076923,
						"acc_stderr,none": 0.0049060734171570706,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.445424141076315,
						"acc_stderr,none": 0.005003763793427974,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5352941176470588,
						"acc_stderr,none": 0.004938630390133722,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.31329620142995696,
						"acc_stderr,none": 0.04710751277400382,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.0002485279995361041,
						"bleu_diff,none": -8.823006485159095,
						"bleu_diff_stderr,none": 0.5504856625018505,
						"bleu_max,none": 22.42911157053935,
						"bleu_max_stderr,none": 0.5141825023827793,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.00024182670675721563,
						"rouge1_diff,none": -10.907633820974988,
						"rouge1_diff_stderr,none": 0.668917888327363,
						"rouge1_max,none": 47.29833255797588,
						"rouge1_max_stderr,none": 0.7273489736793618,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.00018557991070955286,
						"rouge2_diff,none": -13.584114530496528,
						"rouge2_diff_stderr,none": 0.8918655242742985,
						"rouge2_max,none": 30.231307567989813,
						"rouge2_max_stderr,none": 0.9151376823291036,
						"rougeL_acc,none": 0.24479804161566707,
						"rougeL_acc_stderr,none": 0.00022655877504510204,
						"rougeL_diff,none": -11.400076809718822,
						"rougeL_diff_stderr,none": 0.6788849097243875,
						"rougeL_max,none": 44.417870376655785,
						"rougeL_max_stderr,none": 0.7300331328268088
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2827417380660955,
						"bleu_acc_stderr,none": 0.0157647708367773,
						"bleu_diff,none": -8.823006485159095,
						"bleu_diff_stderr,none": 0.7419472100505874,
						"bleu_max,none": 22.42911157053935,
						"bleu_max_stderr,none": 0.717065200928604,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842881,
						"rouge1_diff,none": -10.907633820974988,
						"rouge1_diff_stderr,none": 0.8178740051666656,
						"rouge1_max,none": 47.29833255797588,
						"rouge1_max_stderr,none": 0.8528475676692534,
						"rouge2_acc,none": 0.18604651162790697,
						"rouge2_acc_stderr,none": 0.013622771770442051,
						"rouge2_diff,none": -13.584114530496528,
						"rouge2_diff_stderr,none": 0.9443863215201174,
						"rouge2_max,none": 30.231307567989813,
						"rouge2_max_stderr,none": 0.9566282884846672,
						"rougeL_acc,none": 0.24479804161566707,
						"rougeL_acc_stderr,none": 0.015051869486714999,
						"rougeL_diff,none": -11.400076809718822,
						"rougeL_diff_stderr,none": 0.8239447249205419,
						"rougeL_max,none": 44.417870376655785,
						"rougeL_max_stderr,none": 0.8544197638320458
					},
					"truthfulqa_mc1": {
						"acc,none": 0.2178702570379437,
						"acc_stderr,none": 0.014450846714123904,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.360926625284001,
						"acc_stderr,none": 0.013710059095876529,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.038385826771653545,
						"exact_match_stderr,none": 0.004263152964967611
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.7637309010984497,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.6978757668324775,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 16.959400823743298,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6108918705603789,
						"acc_stderr,none": 0.013702520871485952,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.5970695970695971,
						"acc_stderr,none": 0.029740142541686037,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5225454545454544,
						"acc_stderr,none": 0.029445850681849933,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385256,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207867,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.0223572738810164,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.022261697292270132,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.022361396739207878,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3538688085676037,
						"acc_stderr,none": 0.040343837101509725,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.009443193365903336,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3401606425702811,
						"acc_stderr,none": 0.009496174608136404,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.3485943775100402,
						"acc_stderr,none": 0.00955154205330181,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516883,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5188755020080321,
						"acc_stderr,none": 0.010014928901071299,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3461847389558233,
						"acc_stderr,none": 0.009536061379898333,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.3642570281124498,
						"acc_stderr,none": 0.009645667910246845,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201519,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3429718875502008,
						"acc_stderr,none": 0.009514999934033461,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361546,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757715,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3357429718875502,
						"acc_stderr,none": 0.009465838617337357,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806036,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3453815261044177,
						"acc_stderr,none": 0.009530841175865182,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667058,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5094759641417483,
						"acc_stderr,none": 0.056903140792870545,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.47121111846459296,
						"acc_stderr,none": 0.012845779070719487,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.6962276637988087,
						"acc_stderr,none": 0.011834809582513097,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5122435473196558,
						"acc_stderr,none": 0.012863267059205548,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.500330906684315,
						"acc_stderr,none": 0.012867122498493417,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.4771674387822634,
						"acc_stderr,none": 0.012853702384870849,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.49040370615486434,
						"acc_stderr,none": 0.01286475526040896,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4632693580410324,
						"acc_stderr,none": 0.012832359240206969,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.4811383189940437,
						"acc_stderr,none": 0.012857966762465,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335889,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.01286238758665008,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5102581072137657,
						"acc_stderr,none": 0.012864417047980468,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.6745336030568667,
						"acc_stderr,none": 0.09162336475916147,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8094623655913978,
						"acc_stderr,none": 0.008146492341553306,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5662650602409639,
						"acc_stderr,none": 0.05472870359742141,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.4869655891553702,
						"acc_stderr,none": 0.016148776724612658,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.5399239543726235,
						"acc_stderr,none": 0.030791472862142358,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5492063492063493,
						"acc_stderr,none": 0.028079660068225116,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5753968253968254,
						"acc_stderr,none": 0.022038973193044563,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "tiiuae/falcon-rw-1b"
	},
	"tiiuae/falcon-rw-7b": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6020293122886133,
						"acc_norm,none": 0.584554678692221,
						"acc_norm_stderr,none": 0.04674906145819305,
						"acc_stderr,none": 0.0558635575666503,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.015769027360262897,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.06690000000000002,
						"acc_stderr,none": 0.08886431019504039,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8234477611940298,
						"acc_stderr,none": 0.16693819168615384,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.25111441307578014,
						"acc_norm,none": 0.25111441307578014,
						"acc_norm_stderr,none": 0.11374041136631409,
						"acc_stderr,none": 0.11374041136631409,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.24926610257295803,
						"acc_norm,none": 0.24926610257295803,
						"acc_norm_stderr,none": 0.03432229828879208,
						"acc_stderr,none": 0.03432229828879208,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7202966607036374,
						"likelihood_diff_stderr,none": 0.49782881908424564,
						"pct_stereotype,none": 0.5755813953488372,
						"pct_stereotype_stderr,none": 0.08976558079188013
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04429133858267716,
						"exact_match_stderr,none": 0.004565277720393179
					},
					"glue": {
						"acc,none": 0.4824807416097276,
						"acc_stderr,none": 0.06259412995558915,
						"alias": "glue",
						"f1,none": 0.32880047166047305,
						"f1_stderr,none": 0.0001643198371930269,
						"mcc,none": -0.021814080973054845,
						"mcc_stderr,none": 0.0010239051621994302
					},
					"kmmlu": {
						"acc,none": 0.10950043315044758,
						"acc_norm,none": 0.10950043315044758,
						"acc_norm_stderr,none": 0.0631587889354843,
						"acc_stderr,none": 0.0631587889354843,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4753343565007674,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.0004861803607214433,
						"acc_stderr,none": 0.04927821824566108,
						"alias": "kobest",
						"f1,none": 0.37824953467700784,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6187657675140694,
						"acc_stderr,none": 0.009215457834445401,
						"alias": "lambada",
						"perplexity,none": 4.973804519967448,
						"perplexity_stderr,none": 0.13571139943914853
					},
					"lambada_cloze": {
						"acc,none": 0.01620415292062876,
						"acc_stderr,none": 0.005940749331425778,
						"alias": "lambada_cloze",
						"perplexity,none": 315.1307618604535,
						"perplexity_stderr,none": 85.28465503213083
					},
					"lambada_multilingual": {
						"acc,none": 0.33277702309334367,
						"acc_stderr,none": 0.08799445223480894,
						"alias": "lambada_multilingual",
						"perplexity,none": 156.89292796722805,
						"perplexity_stderr,none": 56.33957812323677
					},
					"mmlu": {
						"acc,none": 0.25601766130180886,
						"acc_stderr,none": 0.03869503108293753,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.25781083953241235,
						"acc_stderr,none": 0.03292475205414248,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.27615062761506276,
						"acc_stderr,none": 0.03855567726875051,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24666883327916803,
						"acc_stderr,none": 0.03719880337640812,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.24262607040913417,
						"acc_stderr,none": 0.04341507444364946,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.30716820440028386,
						"acc_norm,none": 0.28695699660997015,
						"acc_norm_stderr,none": 0.00010657522943684276,
						"acc_stderr,none": 0.0863674796567071,
						"alias": "multimedqa"
					},
					"pawsx": {
						"acc,none": 0.5025714285714286,
						"acc_stderr,none": 0.0412607911018146,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.714081943809347,
						"acc_norm,none": 0.5903135571224072,
						"acc_norm_stderr,none": 0.004787723009101459,
						"acc_stderr,none": 0.1561891820302747,
						"alias": "pythia",
						"bits_per_byte,none": 0.6674379127382432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5882498828646967,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.82564156138582,
						"perplexity_stderr,none": 0.10711724981923416,
						"word_perplexity,none": 11.868710493412717,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41843971631205673,
						"acc_norm,none": 0.4716312056737589,
						"acc_norm_stderr,none": 0.07363433183038441,
						"acc_stderr,none": 0.05397767592522113,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5085687664304016,
						"acc_stderr,none": 0.04771180119582942,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2944805608196479,
						"acc_stderr,none": 0.0012737193650940938,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31211750305997554,
						"bleu_acc_stderr,none": 0.016220756769520926,
						"bleu_diff,none": -8.059779723878206,
						"bleu_diff_stderr,none": 0.8413101248559008,
						"bleu_max,none": 23.07021704235888,
						"bleu_max_stderr,none": 0.7438478606491113,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.015415241740237009,
						"rouge1_diff,none": -10.724846673037314,
						"rouge1_diff_stderr,none": 0.9724663278923614,
						"rouge1_max,none": 46.88371865051904,
						"rouge1_max_stderr,none": 0.8935264336981529,
						"rouge2_acc,none": 0.20563035495716034,
						"rouge2_acc_stderr,none": 0.014148482219460959,
						"rouge2_diff,none": -12.936729844219554,
						"rouge2_diff_stderr,none": 1.1326910481164179,
						"rouge2_max,none": 30.369314980769538,
						"rouge2_max_stderr,none": 1.0076773361570666,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.015127427096520674,
						"rougeL_diff,none": -10.8083416385852,
						"rougeL_diff_stderr,none": 0.9800733132645568,
						"rougeL_max,none": 44.22759755647022,
						"rougeL_max_stderr,none": 0.8955574192952942
					},
					"xcopa": {
						"acc,none": 0.5358181818181819,
						"acc_stderr,none": 0.03318465937871164,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.36331994645247656,
						"acc_stderr,none": 0.043604825167842365,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5372119607725167,
						"acc_stderr,none": 0.07047680522091254,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7143178242301641,
						"acc_stderr,none": 0.0906365053279996,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6020293122886133,
						"acc_norm,none": 0.584554678692221,
						"acc_norm_stderr,none": 0.04674906145819305,
						"acc_stderr,none": 0.0558635575666503,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.334375,
						"acc_stderr,none": 0.015769027360262897,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.321,
						"acc_stderr,none": 0.014770821817934633,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.331,
						"acc_stderr,none": 0.014888272588203936,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.34833333333333333,
						"acc_stderr,none": 0.013759437498874075,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3660409556313993,
						"acc_norm,none": 0.3890784982935154,
						"acc_norm_stderr,none": 0.014247309976045607,
						"acc_stderr,none": 0.01407722310847014,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7184343434343434,
						"acc_norm,none": 0.680976430976431,
						"acc_norm_stderr,none": 0.00956413324944107,
						"acc_stderr,none": 0.00922893476451929,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.06690000000000002,
						"acc_stderr,none": 0.08886431019504039,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.034,
						"acc_stderr,none": 0.004053420174069583,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.099,
						"acc_stderr,none": 0.006679955905951312,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.1535,
						"acc_stderr,none": 0.008062343064385401,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.3555,
						"acc_stderr,none": 0.010705941508891127,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.00122321221546471,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0235,
						"acc_stderr,none": 0.0033881580257424816,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.0005000000000000151,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.005206073752711497,
						"acc_stderr,none": 0.0014992721829171637,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8234477611940298,
						"acc_stderr,none": 0.16693819168615384,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.00896305396259208,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437664,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.0024433521993298506,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707378,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.00868051561552373,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598121,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.591,
						"acc_stderr,none": 0.015555094373257946,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.795,
						"acc_stderr,none": 0.01277255409611312,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946095,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.0026377941462437677,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.985,
						"acc_stderr,none": 0.003845749574503009,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.967,
						"acc_stderr,none": 0.005651808820452368,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.968,
						"acc_stderr,none": 0.005568393575081364,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337074,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942298,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.949,
						"acc_stderr,none": 0.006960420062571412,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.978,
						"acc_stderr,none": 0.004640855259274701,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.909,
						"acc_stderr,none": 0.00909954953840023,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.732,
						"acc_stderr,none": 0.01401329270272949,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.698,
						"acc_stderr,none": 0.01452608023545955,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.835,
						"acc_stderr,none": 0.011743632866916168,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.934,
						"acc_stderr,none": 0.0078552979386976,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.011203415395160331,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.004742730594656804,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.307,
						"acc_stderr,none": 0.014593284892852623,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651523,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659978,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.658,
						"acc_stderr,none": 0.015008706182121731,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085337,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.982,
						"acc_stderr,none": 0.004206387249611491,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.008072494358323492,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.912,
						"acc_stderr,none": 0.008963053962592078,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333354,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.639,
						"acc_stderr,none": 0.015195720118175115,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336666,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.357,
						"acc_stderr,none": 0.015158521721486773,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.015204840912919501,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.587,
						"acc_stderr,none": 0.015577986829936533,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.886,
						"acc_stderr,none": 0.010055103435823332,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.638,
						"acc_stderr,none": 0.0152048409129195,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.908,
						"acc_stderr,none": 0.009144376393151113,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.915,
						"acc_stderr,none": 0.008823426366942331,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.774,
						"acc_stderr,none": 0.013232501619085337,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792939,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557811,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.009899393819724439,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.861,
						"acc_stderr,none": 0.010945263761042974,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.015577986829936531,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.92,
						"acc_stderr,none": 0.00858333697775365,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.0029879638431426557,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.773,
						"acc_stderr,none": 0.013253174964763909,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.459,
						"acc_stderr,none": 0.01576602573788216,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.00932045443478323,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.887,
						"acc_stderr,none": 0.010016552866696869,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.609,
						"acc_stderr,none": 0.015438826294681783,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.883,
						"acc_stderr,none": 0.010169287802713327,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.878,
						"acc_stderr,none": 0.010354864712936694,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.816,
						"acc_stderr,none": 0.012259457340938588,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.853,
						"acc_stderr,none": 0.01120341539516033,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.95,
						"acc_stderr,none": 0.006895472974897876,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.897,
						"acc_stderr,none": 0.009616833339695801,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.977,
						"acc_stderr,none": 0.0047427305946568,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.955,
						"acc_stderr,none": 0.0065588122414061145,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.344,
						"acc_stderr,none": 0.015029633724408945,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.314,
						"acc_stderr,none": 0.01468399195108797,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6859327217125383,
						"acc_stderr,none": 0.008117917728841496,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.32142857142857145,
						"acc_stderr,none": 0.06297362289056342,
						"alias": "cb",
						"f1,none": 0.1949934123847167,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.25111441307578014,
						"acc_norm,none": 0.25111441307578014,
						"acc_norm_stderr,none": 0.11374041136631409,
						"acc_stderr,none": 0.11374041136631409,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.053348255582850765,
						"acc_stderr,none": 0.053348255582850765,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.06818181818181816,
						"acc_stderr,none": 0.06818181818181816,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.1276595744680851,
						"acc_norm,none": 0.1276595744680851,
						"acc_norm_stderr,none": 0.04920290896196925,
						"acc_stderr,none": 0.04920290896196925,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0606060606060606,
						"acc_stderr,none": 0.0606060606060606,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.35135135135135137,
						"acc_norm,none": 0.35135135135135137,
						"acc_norm_stderr,none": 0.07956541321016082,
						"acc_stderr,none": 0.07956541321016082,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.3793103448275862,
						"acc_norm,none": 0.3793103448275862,
						"acc_norm_stderr,none": 0.09169709590633639,
						"acc_stderr,none": 0.09169709590633639,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2972972972972973,
						"acc_norm,none": 0.2972972972972973,
						"acc_norm_stderr,none": 0.07617808344724214,
						"acc_stderr,none": 0.07617808344724214,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595453,
						"acc_stderr,none": 0.08534681648595453,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.3548387096774194,
						"acc_norm,none": 0.3548387096774194,
						"acc_norm_stderr,none": 0.08735525166275225,
						"acc_stderr,none": 0.08735525166275225,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756935,
						"acc_stderr,none": 0.10513149660756935,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.1123666437438737,
						"acc_stderr,none": 0.1123666437438737,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.09523809523809523,
						"acc_stderr,none": 0.09523809523809523,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.043478260869565216,
						"acc_norm,none": 0.043478260869565216,
						"acc_norm_stderr,none": 0.04347826086956523,
						"acc_stderr,none": 0.04347826086956523,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.22448979591836735,
						"acc_norm,none": 0.22448979591836735,
						"acc_norm_stderr,none": 0.06022425581505364,
						"acc_stderr,none": 0.06022425581505364,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.16666666666666666,
						"acc_norm,none": 0.16666666666666666,
						"acc_norm_stderr,none": 0.0903876907577734,
						"acc_stderr,none": 0.0903876907577734,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.13793103448275862,
						"acc_norm,none": 0.13793103448275862,
						"acc_norm_stderr,none": 0.06516628844986677,
						"acc_stderr,none": 0.06516628844986677,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.0652050663696626,
						"acc_stderr,none": 0.0652050663696626,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.06791703342160262,
						"acc_stderr,none": 0.06791703342160262,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.34782608695652173,
						"acc_norm,none": 0.34782608695652173,
						"acc_norm_stderr,none": 0.07099970268936745,
						"acc_stderr,none": 0.07099970268936745,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.24926610257295803,
						"acc_norm,none": 0.24926610257295803,
						"acc_norm_stderr,none": 0.03432229828879208,
						"acc_stderr,none": 0.03432229828879208,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.2485207100591716,
						"acc_norm,none": 0.2485207100591716,
						"acc_norm_stderr,none": 0.033341501981019615,
						"acc_stderr,none": 0.033341501981019615,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.03538668490313392,
						"acc_stderr,none": 0.03538668490313392,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03363591048272823,
						"acc_stderr,none": 0.03363591048272823,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.0340150671524904,
						"acc_stderr,none": 0.0340150671524904,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.030166316298847994,
						"acc_stderr,none": 0.030166316298847994,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.24375,
						"acc_norm,none": 0.24375,
						"acc_norm_stderr,none": 0.03404916326237584,
						"acc_stderr,none": 0.03404916326237584,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.24427480916030533,
						"acc_norm,none": 0.24427480916030533,
						"acc_norm_stderr,none": 0.03768335959728744,
						"acc_stderr,none": 0.03768335959728744,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037267799624996496,
						"acc_stderr,none": 0.037267799624996496,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.042188119282053044,
						"acc_stderr,none": 0.042188119282053044,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25077399380804954,
						"acc_norm,none": 0.25077399380804954,
						"acc_norm_stderr,none": 0.024155705949743273,
						"acc_stderr,none": 0.024155705949743273,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.25139664804469275,
						"acc_norm,none": 0.25139664804469275,
						"acc_norm_stderr,none": 0.032515888371841106,
						"acc_stderr,none": 0.032515888371841106,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2489451476793249,
						"acc_norm,none": 0.2489451476793249,
						"acc_norm_stderr,none": 0.028146970599422644,
						"acc_stderr,none": 0.028146970599422644,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.19811320754716982,
						"acc_norm,none": 0.19811320754716982,
						"acc_norm_stderr,none": 0.03889722288318549,
						"acc_stderr,none": 0.03889722288318549,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.29906542056074764,
						"acc_norm,none": 0.29906542056074764,
						"acc_norm_stderr,none": 0.044470182376718334,
						"acc_stderr,none": 0.044470182376718334,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.04252016223763312,
						"acc_stderr,none": 0.04252016223763312,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.28703703703703703,
						"acc_norm,none": 0.28703703703703703,
						"acc_norm_stderr,none": 0.043733130409147614,
						"acc_stderr,none": 0.043733130409147614,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.04084247315337098,
						"acc_stderr,none": 0.04084247315337098,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2564102564102564,
						"acc_norm,none": 0.2564102564102564,
						"acc_norm_stderr,none": 0.026475851706699697,
						"acc_stderr,none": 0.026475851706699697,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03039153369274154,
						"acc_stderr,none": 0.03039153369274154,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.24561403508771928,
						"acc_norm,none": 0.24561403508771928,
						"acc_norm_stderr,none": 0.03301405946987251,
						"acc_stderr,none": 0.03301405946987251,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.25170068027210885,
						"acc_norm,none": 0.25170068027210885,
						"acc_norm_stderr,none": 0.03591728013761647,
						"acc_stderr,none": 0.03591728013761647,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2517985611510791,
						"acc_norm,none": 0.2517985611510791,
						"acc_norm_stderr,none": 0.03694846055443904,
						"acc_stderr,none": 0.03694846055443904,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.25157232704402516,
						"acc_norm,none": 0.25157232704402516,
						"acc_norm_stderr,none": 0.034520558111649044,
						"acc_stderr,none": 0.034520558111649044,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.25153374233128833,
						"acc_norm,none": 0.25153374233128833,
						"acc_norm_stderr,none": 0.034089978868575295,
						"acc_stderr,none": 0.034089978868575295,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761061,
						"acc_stderr,none": 0.03336605189761061,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.026883687473220848,
						"acc_stderr,none": 0.026883687473220848,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.25757575757575757,
						"acc_norm,none": 0.25757575757575757,
						"acc_norm_stderr,none": 0.031156269519646836,
						"acc_stderr,none": 0.031156269519646836,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.226890756302521,
						"acc_norm,none": 0.226890756302521,
						"acc_norm_stderr,none": 0.027205371538279472,
						"acc_stderr,none": 0.027205371538279472,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.2391304347826087,
						"acc_norm,none": 0.2391304347826087,
						"acc_norm_stderr,none": 0.028187385293933952,
						"acc_stderr,none": 0.028187385293933952,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.037125378336148665,
						"acc_stderr,none": 0.037125378336148665,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.2556818181818182,
						"acc_norm,none": 0.2556818181818182,
						"acc_norm_stderr,none": 0.03297692925434462,
						"acc_stderr,none": 0.03297692925434462,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.26174496644295303,
						"acc_norm,none": 0.26174496644295303,
						"acc_norm_stderr,none": 0.036133623910754545,
						"acc_stderr,none": 0.036133623910754545,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323176,
						"acc_stderr,none": 0.03307162750323176,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2542372881355932,
						"acc_norm,none": 0.2542372881355932,
						"acc_norm_stderr,none": 0.04025566684714263,
						"acc_stderr,none": 0.04025566684714263,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.25609756097560976,
						"acc_norm,none": 0.25609756097560976,
						"acc_norm_stderr,none": 0.03418746588364998,
						"acc_stderr,none": 0.03418746588364998,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.03607993033081377,
						"acc_stderr,none": 0.03607993033081377,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.25396825396825395,
						"acc_norm,none": 0.25396825396825395,
						"acc_norm_stderr,none": 0.03893259610604673,
						"acc_stderr,none": 0.03893259610604673,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.25405405405405407,
						"acc_norm,none": 0.25405405405405407,
						"acc_norm_stderr,none": 0.032092816451453864,
						"acc_stderr,none": 0.032092816451453864,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2441860465116279,
						"acc_norm,none": 0.2441860465116279,
						"acc_norm_stderr,none": 0.03285260554707746,
						"acc_stderr,none": 0.03285260554707746,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.24817518248175183,
						"acc_norm,none": 0.24817518248175183,
						"acc_norm_stderr,none": 0.021332687690541908,
						"acc_stderr,none": 0.021332687690541908,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.24766355140186916,
						"acc_norm,none": 0.24766355140186916,
						"acc_norm_stderr,none": 0.029576535293164483,
						"acc_stderr,none": 0.029576535293164483,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.25203252032520324,
						"acc_norm,none": 0.25203252032520324,
						"acc_norm_stderr,none": 0.039308795268239924,
						"acc_stderr,none": 0.039308795268239924,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.2540983606557377,
						"acc_norm,none": 0.2540983606557377,
						"acc_norm_stderr,none": 0.03957756102798663,
						"acc_stderr,none": 0.03957756102798663,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.03004659915603149,
						"acc_stderr,none": 0.03004659915603149,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.032364888900157734,
						"acc_stderr,none": 0.032364888900157734,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24338624338624337,
						"acc_norm,none": 0.24338624338624337,
						"acc_norm_stderr,none": 0.031297251928558485,
						"acc_stderr,none": 0.031297251928558485,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04037864265436242,
						"acc_stderr,none": 0.04037864265436242,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2482758620689655,
						"acc_norm,none": 0.2482758620689655,
						"acc_norm_stderr,none": 0.0360010569272777,
						"acc_stderr,none": 0.0360010569272777,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.25142857142857145,
						"acc_norm,none": 0.25142857142857145,
						"acc_norm_stderr,none": 0.032888897342098204,
						"acc_stderr,none": 0.032888897342098204,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.25118483412322273,
						"acc_norm,none": 0.25118483412322273,
						"acc_norm_stderr,none": 0.029927771242945204,
						"acc_stderr,none": 0.029927771242945204,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.022360679774997897,
						"acc_stderr,none": 0.022360679774997897,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171163,
						"acc_stderr,none": 0.028324514684171163,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.032794240385439676,
						"acc_stderr,none": 0.032794240385439676,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03712537833614866,
						"acc_stderr,none": 0.03712537833614866,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890808,
						"acc_stderr,none": 0.028952167450890808,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.24848484848484848,
						"acc_norm,none": 0.24848484848484848,
						"acc_norm_stderr,none": 0.03374402644139404,
						"acc_stderr,none": 0.03374402644139404,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.03186439492581517,
						"acc_stderr,none": 0.03186439492581517,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.03416149068322981,
						"acc_stderr,none": 0.03416149068322981,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03434014098717226,
						"acc_stderr,none": 0.03434014098717226,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.021814080973054845,
						"mcc_stderr,none": 0.031998518125054325
					},
					"copa": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.04020151261036845,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.7202966607036374,
						"likelihood_diff_stderr,none": 0.49782881908424564,
						"pct_stereotype,none": 0.5755813953488372,
						"pct_stereotype_stderr,none": 0.08976558079188013
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6595855694692903,
						"likelihood_diff_stderr,none": 0.08906801031941602,
						"pct_stereotype,none": 0.6535480023852117,
						"pct_stereotype_stderr,none": 0.011623134771282741
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 4.21565934065934,
						"likelihood_diff_stderr,none": 0.38648995905994604,
						"pct_stereotype,none": 0.6153846153846154,
						"pct_stereotype_stderr,none": 0.05128205128205124
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.181818181818182,
						"likelihood_diff_stderr,none": 2.1479617476625346,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.15,
						"likelihood_diff_stderr,none": 0.7219889395594326,
						"pct_stereotype,none": 0.7384615384615385,
						"pct_stereotype_stderr,none": 0.05493406483494501
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.6734375,
						"likelihood_diff_stderr,none": 0.17546869526143857,
						"pct_stereotype,none": 0.675,
						"pct_stereotype_stderr,none": 0.02622395707678178
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.511574074074074,
						"likelihood_diff_stderr,none": 0.21297805904627023,
						"pct_stereotype,none": 0.6018518518518519,
						"pct_stereotype_stderr,none": 0.033384734032074016
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.701388888888889,
						"likelihood_diff_stderr,none": 0.32644507224628055,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.3366141732283463,
						"likelihood_diff_stderr,none": 0.14924735562642064,
						"pct_stereotype,none": 0.5708661417322834,
						"pct_stereotype_stderr,none": 0.02198161280908021
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 4.068693693693693,
						"likelihood_diff_stderr,none": 0.3735973588429742,
						"pct_stereotype,none": 0.7927927927927928,
						"pct_stereotype_stderr,none": 0.03864434340455356
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.884408602150538,
						"likelihood_diff_stderr,none": 0.45865955611362014,
						"pct_stereotype,none": 0.8709677419354839,
						"pct_stereotype_stderr,none": 0.03495073154102977
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.255263157894737,
						"likelihood_diff_stderr,none": 0.24646772141799336,
						"pct_stereotype,none": 0.6684210526315789,
						"pct_stereotype_stderr,none": 0.0342442478876195
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.782200357781753,
						"likelihood_diff_stderr,none": 0.08936446088810855,
						"pct_stereotype,none": 0.49672033392963627,
						"pct_stereotype_stderr,none": 0.012213036478213844
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 4.05,
						"likelihood_diff_stderr,none": 0.44308038178089215,
						"pct_stereotype,none": 0.45555555555555555,
						"pct_stereotype_stderr,none": 0.05279009646630345
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.365384615384615,
						"likelihood_diff_stderr,none": 0.975096718006709,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 5.295454545454546,
						"likelihood_diff_stderr,none": 0.5182820007028953,
						"pct_stereotype,none": 0.6060606060606061,
						"pct_stereotype_stderr,none": 0.06060606060606063
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.1070872274143304,
						"likelihood_diff_stderr,none": 0.1631141268430045,
						"pct_stereotype,none": 0.514018691588785,
						"pct_stereotype_stderr,none": 0.027939861549302374
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 4.383893280632411,
						"likelihood_diff_stderr,none": 0.2320676661871362,
						"pct_stereotype,none": 0.30434782608695654,
						"pct_stereotype_stderr,none": 0.02898550724637675
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.7881944444444446,
						"likelihood_diff_stderr,none": 0.4366583039727521,
						"pct_stereotype,none": 0.5138888888888888,
						"pct_stereotype_stderr,none": 0.05931618532716555
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.2233695652173915,
						"likelihood_diff_stderr,none": 0.15497666526423515,
						"pct_stereotype,none": 0.47608695652173916,
						"pct_stereotype_stderr,none": 0.023311295211252404
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.723913043478261,
						"likelihood_diff_stderr,none": 0.4068489486996423,
						"pct_stereotype,none": 0.5826086956521739,
						"pct_stereotype_stderr,none": 0.04618572379512261
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.085164835164835,
						"likelihood_diff_stderr,none": 0.3832819638342877,
						"pct_stereotype,none": 0.6813186813186813,
						"pct_stereotype_stderr,none": 0.049117041148312765
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.657525510204081,
						"likelihood_diff_stderr,none": 0.27943369532035395,
						"pct_stereotype,none": 0.6020408163265306,
						"pct_stereotype_stderr,none": 0.0350521715047299
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04429133858267716,
						"exact_match_stderr,none": 0.004565277720393179
					},
					"glue": {
						"acc,none": 0.4824807416097276,
						"acc_stderr,none": 0.06259412995558915,
						"alias": "glue",
						"f1,none": 0.32880047166047305,
						"f1_stderr,none": 0.0001643198371930269,
						"mcc,none": -0.021814080973054845,
						"mcc_stderr,none": 0.0010239051621994302
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.02122820318423048,
						"exact_match_stderr,get-answer": 0.003970449129848636
					},
					"hellaswag": {
						"acc,none": 0.5399322844054969,
						"acc_norm,none": 0.7200756821350328,
						"acc_norm_stderr,none": 0.0044804424467629125,
						"acc_stderr,none": 0.004973842670559799,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.10950043315044758,
						"acc_norm,none": 0.10950043315044758,
						"acc_norm_stderr,none": 0.0631587889354843,
						"acc_stderr,none": 0.0631587889354843,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.040936018074033256,
						"acc_stderr,none": 0.040936018074033256,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.101,
						"acc_norm,none": 0.101,
						"acc_norm_stderr,none": 0.009533618929340973,
						"acc_stderr,none": 0.009533618929340973,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.081,
						"acc_norm,none": 0.081,
						"acc_norm_stderr,none": 0.008632121032139962,
						"acc_stderr,none": 0.008632121032139962,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.203,
						"acc_norm,none": 0.203,
						"acc_norm_stderr,none": 0.01272607374459827,
						"acc_stderr,none": 0.01272607374459827,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.012886662332274555,
						"acc_stderr,none": 0.012886662332274555,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18166666666666667,
						"acc_norm,none": 0.18166666666666667,
						"acc_norm_stderr,none": 0.015753945309122372,
						"acc_stderr,none": 0.015753945309122372,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.023,
						"acc_norm,none": 0.023,
						"acc_norm_stderr,none": 0.004742730594656793,
						"acc_stderr,none": 0.004742730594656793,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.012,
						"acc_norm,none": 0.012,
						"acc_norm_stderr,none": 0.003444977194099833,
						"acc_stderr,none": 0.003444977194099833,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.034,
						"acc_norm,none": 0.034,
						"acc_norm_stderr,none": 0.005733836139695457,
						"acc_stderr,none": 0.005733836139695457,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.043,
						"acc_norm,none": 0.043,
						"acc_norm_stderr,none": 0.006418114379799741,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.040347329239296424,
						"acc_stderr,none": 0.040347329239296424,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.037,
						"acc_norm,none": 0.037,
						"acc_norm_stderr,none": 0.005972157622389645,
						"acc_stderr,none": 0.005972157622389645,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.041,
						"acc_norm,none": 0.041,
						"acc_norm_stderr,none": 0.006273624021118766,
						"acc_stderr,none": 0.006273624021118766,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662146,
						"acc_stderr,none": 0.012207580637662146,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.049,
						"acc_norm,none": 0.049,
						"acc_norm_stderr,none": 0.0068297617561409165,
						"acc_stderr,none": 0.0068297617561409165,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.141,
						"acc_norm,none": 0.141,
						"acc_norm_stderr,none": 0.011010914595992436,
						"acc_stderr,none": 0.011010914595992436,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.010463483381956722,
						"acc_stderr,none": 0.010463483381956722,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.094,
						"acc_norm,none": 0.094,
						"acc_norm_stderr,none": 0.009233052000787728,
						"acc_stderr,none": 0.009233052000787728,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.099,
						"acc_norm,none": 0.099,
						"acc_norm_stderr,none": 0.009449248027662758,
						"acc_stderr,none": 0.009449248027662758,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.04,
						"acc_norm,none": 0.04,
						"acc_norm_stderr,none": 0.006199874066337077,
						"acc_stderr,none": 0.006199874066337077,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.043,
						"acc_norm,none": 0.043,
						"acc_norm_stderr,none": 0.006418114379799741,
						"acc_stderr,none": 0.006418114379799741,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.066,
						"acc_norm,none": 0.066,
						"acc_norm_stderr,none": 0.00785529793869758,
						"acc_stderr,none": 0.00785529793869758,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281576,
						"acc_stderr,none": 0.013354937452281576,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.072,
						"acc_norm,none": 0.072,
						"acc_norm_stderr,none": 0.008178195576218681,
						"acc_stderr,none": 0.008178195576218681,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.191,
						"acc_norm,none": 0.191,
						"acc_norm_stderr,none": 0.01243678711217947,
						"acc_stderr,none": 0.01243678711217947,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.16,
						"acc_norm,none": 0.16,
						"acc_norm_stderr,none": 0.014979117350308798,
						"acc_stderr,none": 0.014979117350308798,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.009859828407037186,
						"acc_stderr,none": 0.009859828407037186,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.118,
						"acc_norm,none": 0.118,
						"acc_norm_stderr,none": 0.010206869264381791,
						"acc_stderr,none": 0.010206869264381791,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.068,
						"acc_norm,none": 0.068,
						"acc_norm_stderr,none": 0.007964887911291603,
						"acc_stderr,none": 0.007964887911291603,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.096,
						"acc_norm,none": 0.096,
						"acc_norm_stderr,none": 0.009320454434783219,
						"acc_stderr,none": 0.009320454434783219,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02395648228514077,
						"acc_stderr,none": 0.02395648228514077,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577945,
						"acc_stderr,none": 0.013454070462577945,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.048,
						"acc_norm,none": 0.048,
						"acc_norm_stderr,none": 0.0067632641336666756,
						"acc_stderr,none": 0.0067632641336666756,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.010463483381956722,
						"acc_stderr,none": 0.010463483381956722,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.027234326551496862,
						"acc_stderr,none": 0.027234326551496862,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.158,
						"acc_norm,none": 0.158,
						"acc_norm_stderr,none": 0.01153989467755957,
						"acc_stderr,none": 0.01153989467755957,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.156,
						"acc_norm,none": 0.156,
						"acc_norm_stderr,none": 0.01148023500612234,
						"acc_stderr,none": 0.01148023500612234,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.04,
						"acc_norm,none": 0.04,
						"acc_norm_stderr,none": 0.006199874066337041,
						"acc_stderr,none": 0.006199874066337041,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4753343565007674,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.0004861803607214433,
						"acc_stderr,none": 0.04927821824566108,
						"alias": "kobest",
						"f1,none": 0.37824953467700784,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.479,
						"acc_stderr,none": 0.015805341148131296,
						"alias": " - kobest_copa",
						"f1,none": 0.4777460683100123,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.302,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.02204949796982787,
						"acc_stderr,none": 0.020553269174209188,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.2981294496039646,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5491183879093199,
						"acc_stderr,none": 0.02500441294229605,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5435110777077592,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6187657675140694,
						"acc_stderr,none": 0.009215457834445401,
						"alias": "lambada",
						"perplexity,none": 4.973804519967448,
						"perplexity_stderr,none": 0.13571139943914853
					},
					"lambada_cloze": {
						"acc,none": 0.01620415292062876,
						"acc_stderr,none": 0.005940749331425778,
						"alias": "lambada_cloze",
						"perplexity,none": 315.1307618604535,
						"perplexity_stderr,none": 85.28465503213083
					},
					"lambada_multilingual": {
						"acc,none": 0.33277702309334367,
						"acc_stderr,none": 0.08799445223480894,
						"alias": "lambada_multilingual",
						"perplexity,none": 156.89292796722805,
						"perplexity_stderr,none": 56.33957812323677
					},
					"lambada_openai": {
						"acc,none": 0.6332233650300796,
						"acc_stderr,none": 0.006714155098732923,
						"alias": " - lambada_openai",
						"perplexity,none": 4.82564156138582,
						"perplexity_stderr,none": 0.10711724981923416
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.004851542790607413,
						"acc_stderr,none": 0.0009680458844827139,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 483.9663998386866,
						"perplexity_stderr,none": 16.569441423025005
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2173491170192121,
						"acc_stderr,none": 0.005746124780260983,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 262.9490349906636,
						"perplexity_stderr,none": 16.048028171430108
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6320589947603338,
						"acc_stderr,none": 0.00671861841779132,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.823464814484801,
						"perplexity_stderr,none": 0.10686896560814735
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.2536386570929556,
						"acc_stderr,none": 0.006061698956508256,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 196.04099021583696,
						"perplexity_stderr,none": 11.293936946754558
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.2963322336503008,
						"acc_stderr,none": 0.006361878179691865,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 111.68554553363627,
						"perplexity_stderr,none": 6.315272640151771
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.2645061129439162,
						"acc_stderr,none": 0.006144965702579054,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 208.96560428151852,
						"perplexity_stderr,none": 12.937586899750382
					},
					"lambada_standard": {
						"acc,none": 0.6062487871143023,
						"acc_stderr,none": 0.006806885831834558,
						"alias": " - lambada_standard",
						"perplexity,none": 5.128848160217075,
						"perplexity_stderr,none": 0.11584910993008157
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.027556763050650107,
						"acc_stderr,none": 0.002280648227904733,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 146.29512388222042,
						"perplexity_stderr,none": 4.2787126888127025
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.24618320610687022,
						"exact_match_stderr,get-answer": 0.010868610457495204
					},
					"logiqa": {
						"acc,none": 0.21812596006144394,
						"acc_norm,none": 0.27956989247311825,
						"acc_norm_stderr,none": 0.017602909186822453,
						"acc_stderr,none": 0.016198149258419312,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.23536895674300254,
						"acc_norm,none": 0.2837150127226463,
						"acc_norm_stderr,none": 0.011373548669758793,
						"acc_stderr,none": 0.010703170941435934,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.25996649916247905,
						"acc_norm,none": 0.2609715242881072,
						"acc_norm_stderr,none": 0.00803947590672677,
						"acc_stderr,none": 0.008029434758777935,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.4176022029231095,
						"acc_stderr,none": 0.0050755399847433515,
						"alias": "mc_taco",
						"f1,none": 0.5133197628108682,
						"f1_stderr,none": 0.005740964843792283
					},
					"medmcqa": {
						"acc,none": 0.2983504661726034,
						"acc_norm,none": 0.2983504661726034,
						"acc_norm_stderr,none": 0.007075081820104072,
						"acc_stderr,none": 0.007075081820104072,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.27258444619010214,
						"acc_norm,none": 0.27258444619010214,
						"acc_norm_stderr,none": 0.012485279567743063,
						"acc_stderr,none": 0.012485279567743063,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.25601766130180886,
						"acc_stderr,none": 0.03869503108293753,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.2,
						"acc_stderr,none": 0.03455473702325435,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.19078947368421054,
						"acc_stderr,none": 0.031975658210325004,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2641509433962264,
						"acc_stderr,none": 0.027134291628741716,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2361111111111111,
						"acc_stderr,none": 0.03551446610810826,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909282,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.04560480215720684,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.24277456647398843,
						"acc_stderr,none": 0.0326926380614177,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.23529411764705882,
						"acc_stderr,none": 0.042207736591714534,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2936170212765957,
						"acc_stderr,none": 0.029771642712491227,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.24561403508771928,
						"acc_stderr,none": 0.040493392977481404,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.25517241379310346,
						"acc_stderr,none": 0.03632984052707842,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2328042328042328,
						"acc_stderr,none": 0.021765961672154544,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.21428571428571427,
						"acc_stderr,none": 0.03670066451047181,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.2064516129032258,
						"acc_stderr,none": 0.02302589961718872,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.030315099285617715,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.23,
						"acc_stderr,none": 0.04229525846816506,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.0347769116216366,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.20202020202020202,
						"acc_stderr,none": 0.028606204289229876,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.22797927461139897,
						"acc_stderr,none": 0.030276909945178274,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2512820512820513,
						"acc_stderr,none": 0.02199201666237056,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712166,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.19747899159663865,
						"acc_stderr,none": 0.025859164122051456,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.2582781456953642,
						"acc_stderr,none": 0.035737053147634576,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24220183486238533,
						"acc_stderr,none": 0.01836817630659862,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2037037037037037,
						"acc_stderr,none": 0.027467401804057982,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.28921568627450983,
						"acc_stderr,none": 0.03182231867647553,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3206751054852321,
						"acc_stderr,none": 0.030381931949990417,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3094170403587444,
						"acc_stderr,none": 0.031024411740572206,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.31297709923664124,
						"acc_stderr,none": 0.04066962905677698,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.25781083953241235,
						"acc_stderr,none": 0.03292475205414248,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.23140495867768596,
						"acc_stderr,none": 0.0384985609879409,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374984,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2392638036809816,
						"acc_stderr,none": 0.033519538795212696,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.3392857142857143,
						"acc_stderr,none": 0.04493949068613539,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.27184466019417475,
						"acc_stderr,none": 0.044052680241409216,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3162393162393162,
						"acc_stderr,none": 0.030463656747340265,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.28991060025542786,
						"acc_stderr,none": 0.016225017944770954,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.24855491329479767,
						"acc_stderr,none": 0.023267528432100174,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23798882681564246,
						"acc_stderr,none": 0.0142426300705749,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.02526169121972947,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.27615062761506276,
						"acc_stderr,none": 0.03855567726875051,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.29260450160771706,
						"acc_stderr,none": 0.025839898334877983,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2623456790123457,
						"acc_stderr,none": 0.024477222856135107,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2730496453900709,
						"acc_stderr,none": 0.026577860943307857,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.24511082138200782,
						"acc_stderr,none": 0.010986307870045509,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.19852941176470587,
						"acc_stderr,none": 0.02423101337054108,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25163398692810457,
						"acc_stderr,none": 0.01755581809132227,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.32727272727272727,
						"acc_stderr,none": 0.044942908662520896,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.22448979591836735,
						"acc_stderr,none": 0.026711430555538415,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.24666883327916803,
						"acc_stderr,none": 0.03719880337640812,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.25870646766169153,
						"acc_stderr,none": 0.030965903123573026,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.24262607040913417,
						"acc_stderr,none": 0.04341507444364946,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.2710843373493976,
						"acc_stderr,none": 0.034605799075530276,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3216374269005848,
						"acc_stderr,none": 0.03582529442573122,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.3632195618950586,
						"acc_stderr,none": 0.004854633461302187,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3708299430431245,
						"acc_stderr,none": 0.004871610659383,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.36519607843137253,
						"acc_stderr,none": 0.023866330396787993,
						"alias": "mrpc",
						"f1,none": 0.20795107033639143,
						"f1_stderr,none": 0.030056796300562325
					},
					"multimedqa": {
						"acc,none": 0.30716820440028386,
						"acc_norm,none": 0.28695699660997015,
						"acc_norm_stderr,none": 0.00010657522943684276,
						"acc_stderr,none": 0.0863674796567071,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.48102310231023104,
						"acc_stderr,none": 0.007176628581973889,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.683314523901412,
						"mrr_stderr,none": 0.010296913831452465,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43340857787810383,
						"r@2_stderr,none": 0.016657587894501214
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6507712584451413,
						"mrr_stderr,none": 0.010467151685771016,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.45146726862302483,
						"r@2_stderr,none": 0.016727951978179462
					},
					"openbookqa": {
						"acc,none": 0.314,
						"acc_norm,none": 0.424,
						"acc_norm_stderr,none": 0.022122993778135404,
						"acc_stderr,none": 0.020776701920308997,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.5065,
						"acc_stderr,none": 0.0111821910061423,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.413,
						"acc_stderr,none": 0.011012544577391419,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4225,
						"acc_stderr,none": 0.011047981894987801,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.011178751372184865,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.011107641056719627,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456285,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.552,
						"acc_stderr,none": 0.011122493197456286,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.5025714285714286,
						"acc_stderr,none": 0.0412607911018146,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7758433079434167,
						"acc_norm,none": 0.79379760609358,
						"acc_norm_stderr,none": 0.009439460331609507,
						"acc_stderr,none": 0.00972989795641004,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.27818104184457726,
						"acc_norm,none": 0.2861870196413322,
						"acc_norm_stderr,none": 0.0033021008866807697,
						"acc_stderr,none": 0.0032737918117096993,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.63,
						"acc_stderr,none": 0.021613289165165788,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.714081943809347,
						"acc_norm,none": 0.5903135571224072,
						"acc_norm_stderr,none": 0.004787723009101459,
						"acc_stderr,none": 0.1561891820302747,
						"alias": "pythia",
						"bits_per_byte,none": 0.6674379127382432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5882498828646967,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.82564156138582,
						"perplexity_stderr,none": 0.10711724981923416,
						"word_perplexity,none": 11.868710493412717,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41843971631205673,
						"acc_norm,none": 0.4716312056737589,
						"acc_norm_stderr,none": 0.07363433183038441,
						"acc_stderr,none": 0.05397767592522113,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.5166666666666667,
						"acc_norm,none": 0.625,
						"acc_norm_stderr,none": 0.04437947515604539,
						"acc_stderr,none": 0.04580945392704764,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.38125,
						"acc_norm,none": 0.4875,
						"acc_norm_stderr,none": 0.03964018591811396,
						"acc_stderr,none": 0.03851802138867096,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064897,
						"acc_stderr,none": 0.0290954929170649,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.4993593263774483,
						"acc_stderr,none": 0.006765404997877071,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.5357407865446451,
						"acc_stderr,none": 0.0024803394537062412,
						"alias": "qqp",
						"f1,none": 0.33007352416303803,
						"f1_stderr,none": 0.0036252777492624265
					},
					"race": {
						"acc,none": 0.3866028708133971,
						"acc_stderr,none": 0.015071384773047118,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795997,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.932,
						"acc_norm,none": 0.914,
						"acc_norm_stderr,none": 0.008870325962594766,
						"acc_stderr,none": 0.007964887911291603,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.555956678700361,
						"acc_stderr,none": 0.029907396333795997,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01694185368929243,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.548335499350195,
						"acc_norm,none": 0.7463760871738478,
						"acc_norm_stderr,none": 0.0030761299614220044,
						"acc_stderr,none": 0.0035185350577884005,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5085687664304016,
						"acc_stderr,none": 0.04771180119582942,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5911458333333334,
						"acc_stderr,none": 0.004920406628415575,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.40093240093240096,
						"acc_stderr,none": 0.004934043083404487,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5318627450980392,
						"acc_stderr,none": 0.004940917376708871,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2944805608196479,
						"acc_stderr,none": 0.0012737193650940938,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.31211750305997554,
						"bleu_acc_stderr,none": 0.016220756769520926,
						"bleu_diff,none": -8.059779723878206,
						"bleu_diff_stderr,none": 0.8413101248559008,
						"bleu_max,none": 23.07021704235888,
						"bleu_max_stderr,none": 0.7438478606491113,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.015415241740237009,
						"rouge1_diff,none": -10.724846673037314,
						"rouge1_diff_stderr,none": 0.9724663278923614,
						"rouge1_max,none": 46.88371865051904,
						"rouge1_max_stderr,none": 0.8935264336981529,
						"rouge2_acc,none": 0.20563035495716034,
						"rouge2_acc_stderr,none": 0.014148482219460959,
						"rouge2_diff,none": -12.936729844219554,
						"rouge2_diff_stderr,none": 1.1326910481164179,
						"rouge2_max,none": 30.369314980769538,
						"rouge2_max_stderr,none": 1.0076773361570666,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.015127427096520674,
						"rougeL_diff,none": -10.8083416385852,
						"rougeL_diff_stderr,none": 0.9800733132645568,
						"rougeL_max,none": 44.22759755647022,
						"rougeL_max_stderr,none": 0.8955574192952942
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.31211750305997554,
						"bleu_acc_stderr,none": 0.016220756769520926,
						"bleu_diff,none": -8.059779723878206,
						"bleu_diff_stderr,none": 0.8413101248559008,
						"bleu_max,none": 23.07021704235888,
						"bleu_max_stderr,none": 0.7438478606491113,
						"rouge1_acc,none": 0.2631578947368421,
						"rouge1_acc_stderr,none": 0.015415241740237009,
						"rouge1_diff,none": -10.724846673037314,
						"rouge1_diff_stderr,none": 0.9724663278923614,
						"rouge1_max,none": 46.88371865051904,
						"rouge1_max_stderr,none": 0.8935264336981529,
						"rouge2_acc,none": 0.20563035495716034,
						"rouge2_acc_stderr,none": 0.014148482219460959,
						"rouge2_diff,none": -12.936729844219554,
						"rouge2_diff_stderr,none": 1.1326910481164179,
						"rouge2_max,none": 30.369314980769538,
						"rouge2_max_stderr,none": 1.0076773361570666,
						"rougeL_acc,none": 0.2484700122399021,
						"rougeL_acc_stderr,none": 0.015127427096520674,
						"rougeL_diff,none": -10.8083416385852,
						"rougeL_diff_stderr,none": 0.9800733132645568,
						"rougeL_max,none": 44.22759755647022,
						"rougeL_max_stderr,none": 0.8955574192952942
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22888616891064872,
						"acc_stderr,none": 0.014706994909055027,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36068260000727254,
						"acc_stderr,none": 0.013372052908226484,
						"alias": "truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04429133858267716,
						"exact_match_stderr,none": 0.004565277720393179
					},
					"wic": {
						"acc,none": 0.4890282131661442,
						"acc_stderr,none": 0.019805951085979406,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6674379127382432,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.5882498828646967,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 11.868710493412717,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.65982636148382,
						"acc_stderr,none": 0.013315218762417397,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5070422535211268,
						"acc_stderr,none": 0.059755502635482904,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6057692307692307,
						"acc_stderr,none": 0.048151547759907105,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7106227106227107,
						"acc_stderr,none": 0.02749586023452527,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5358181818181819,
						"acc_stderr,none": 0.03318465937871164,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.02237429816635319,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.51,
						"acc_stderr,none": 0.02237859698923078,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.022311333245289663,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.02232498173838525,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.498,
						"acc_stderr,none": 0.02238289498648353,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.53,
						"acc_stderr,none": 0.022342748192502846,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.02210903931061855,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.588,
						"acc_stderr,none": 0.022033677993740865,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.36331994645247656,
						"acc_stderr,none": 0.043604825167842365,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617617,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778584,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.43052208835341366,
						"acc_stderr,none": 0.00992484453728553,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3317269076305221,
						"acc_stderr,none": 0.009437454900329118,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5108433734939759,
						"acc_stderr,none": 0.010019715824483477,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.39076305220883534,
						"acc_stderr,none": 0.00977996757994179,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.42570281124497994,
						"acc_stderr,none": 0.00991081012782283,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3417670682730924,
						"acc_stderr,none": 0.009506977398287618,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3409638554216867,
						"acc_stderr,none": 0.009501591178361544,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3353413654618474,
						"acc_stderr,none": 0.009463034891512697,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3377510040160643,
						"acc_stderr,none": 0.009479742273956477,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.00944319336590334,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3321285140562249,
						"acc_stderr,none": 0.00944032800124063,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3345381526104418,
						"acc_stderr,none": 0.009457404390939166,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5372119607725167,
						"acc_stderr,none": 0.07047680522091254,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4798146922567836,
						"acc_stderr,none": 0.012856635706498289,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7551290536068829,
						"acc_stderr,none": 0.011066002048086323,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.5883520847121112,
						"acc_stderr,none": 0.012664648329214077,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5082726671078756,
						"acc_stderr,none": 0.0128653640203754,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.4870946393117141,
						"acc_stderr,none": 0.01286283860572848,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5175380542686963,
						"acc_stderr,none": 0.012859207453266306,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4738583719391132,
						"acc_stderr,none": 0.012849526888044216,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.012864417047980477,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5062872270019855,
						"acc_stderr,none": 0.012866108021218216,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5360688285903376,
						"acc_stderr,none": 0.012833602406620015,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5671740569159497,
						"acc_stderr,none": 0.012750474502985826,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7143178242301641,
						"acc_stderr,none": 0.0906365053279996,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8468817204301076,
						"acc_stderr,none": 0.007469763261341203,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.5783132530120482,
						"acc_stderr,none": 0.05453428485295111,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5255474452554745,
						"acc_stderr,none": 0.016133166089128848,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6007604562737643,
						"acc_stderr,none": 0.03025636835693898,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5301587301587302,
						"acc_stderr,none": 0.028165256808123696,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6587301587301587,
						"acc_stderr,none": 0.021140669478119697,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "tiiuae/falcon-rw-7b"
	},
	"togethercomputer/RedPajama-INCITE-7B-Base": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.6034385569334837,
						"acc_norm,none": 0.5910372040586246,
						"acc_norm_stderr,none": 0.04781644043179842,
						"acc_stderr,none": 0.05560271295416639,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3528125,
						"acc_stderr,none": 0.01616927548412904,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.02535,
						"acc_stderr,none": 0.03228414584237135,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.8199253731343283,
						"acc_stderr,none": 0.16134993865689873,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.2451708766716196,
						"acc_norm,none": 0.2451708766716196,
						"acc_norm_stderr,none": 0.11809812467048535,
						"acc_stderr,none": 0.11809812467048535,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.25280607839751335,
						"acc_norm,none": 0.25280607839751335,
						"acc_norm_stderr,none": 0.04104649589180001,
						"acc_stderr,none": 0.04104649589180001,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.643317680381634,
						"likelihood_diff_stderr,none": 0.43363916971201094,
						"pct_stereotype,none": 0.5854203935599285,
						"pct_stereotype_stderr,none": 0.074218934797104
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.07283464566929133,
						"exact_match_stderr,none": 0.0057662390329137805
					},
					"glue": {
						"acc,none": 0.47479626778111467,
						"acc_stderr,none": 0.07710068131458633,
						"alias": "glue",
						"f1,none": 0.28579319078788673,
						"f1_stderr,none": 0.0019606327113536547,
						"mcc,none": 0.010223886506275306,
						"mcc_stderr,none": 0.0009641574992927934
					},
					"kmmlu": {
						"acc,none": 0.15114062951198382,
						"acc_norm,none": 0.15114062951198382,
						"acc_norm_stderr,none": 0.04219089042469381,
						"acc_stderr,none": 0.04219089042469381,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4819118614338961,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04776314369424466,
						"alias": "kobest",
						"f1,none": 0.3846770639557548,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6549582767320008,
						"acc_stderr,none": 0.02448321431391065,
						"alias": "lambada",
						"perplexity,none": 4.919983099161434,
						"perplexity_stderr,none": 0.47080234373481183
					},
					"lambada_cloze": {
						"acc,none": 0.03793906462254997,
						"acc_stderr,none": 0.006092491808066035,
						"alias": "lambada_cloze",
						"perplexity,none": 302.74656013590857,
						"perplexity_stderr,none": 36.1742705293389
					},
					"lambada_multilingual": {
						"acc,none": 0.43578497962352025,
						"acc_stderr,none": 0.07783182786033929,
						"alias": "lambada_multilingual",
						"perplexity,none": 55.75634065211411,
						"perplexity_stderr,none": 17.112832544800874
					},
					"mmlu": {
						"acc,none": 0.2617860703603475,
						"acc_stderr,none": 0.03633266968838108,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.2680127523910733,
						"acc_stderr,none": 0.028488960342705927,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.25651754103636953,
						"acc_stderr,none": 0.04113627785751167,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2557686057848554,
						"acc_stderr,none": 0.03273477855480962,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.263558515699334,
						"acc_stderr,none": 0.04364865601901659,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.30731014904187365,
						"acc_norm,none": 0.28086754518989626,
						"acc_norm_stderr,none": 9.552328148792985e-05,
						"acc_stderr,none": 0.10566243428903258,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4830714285714286,
						"acc_stderr,none": 0.0307277164128183,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7160772540226711,
						"acc_norm,none": 0.5958619336483942,
						"acc_norm_stderr,none": 0.004598168838341248,
						"acc_stderr,none": 0.15025462680941115,
						"alias": "pythia",
						"bits_per_byte,none": 0.6194402405334403,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.536278995998468,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.004151656494574,
						"perplexity_stderr,none": 0.0853963706849589,
						"word_perplexity,none": 9.934353680376269,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4024822695035461,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.06096183797928268,
						"acc_stderr,none": 0.044708898258928376,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5150577351835214,
						"acc_stderr,none": 0.012920415764871355,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.2957315334460171,
						"acc_stderr,none": 0.03485008104638614,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2876376988984088,
						"bleu_acc_stderr,none": 0.0002511057022926932,
						"bleu_diff,none": -10.251930996798425,
						"bleu_diff_stderr,none": 0.6423983995999273,
						"bleu_max,none": 25.04500918259965,
						"bleu_max_stderr,none": 0.5666597483767061,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.00024182670675721596,
						"rouge1_diff,none": -12.87367992958503,
						"rouge1_diff_stderr,none": 0.7610821487355909,
						"rouge1_max,none": 49.34764427245758,
						"rouge1_max_stderr,none": 0.7689288016132617,
						"rouge2_acc,none": 0.211750305997552,
						"rouge2_acc_stderr,none": 0.00020454915920036063,
						"rouge2_diff,none": -14.790526943078385,
						"rouge2_diff_stderr,none": 1.0637993438666953,
						"rouge2_max,none": 33.001995528488436,
						"rouge2_max_stderr,none": 0.9809341676009351,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.00023403117548621337,
						"rougeL_diff,none": -12.982393096206822,
						"rougeL_diff_stderr,none": 0.7679664439786057,
						"rougeL_max,none": 46.578606282191004,
						"rougeL_max_stderr,none": 0.7764362289690075
					},
					"xcopa": {
						"acc,none": 0.5254545454545455,
						"acc_stderr,none": 0.036407165846333675,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3827309236947791,
						"acc_stderr,none": 0.05194928176239464,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5524336682510078,
						"acc_stderr,none": 0.06791399332607427,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.74331310406833,
						"acc_stderr,none": 0.08408219267029617,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.6034385569334837,
						"acc_norm,none": 0.5910372040586246,
						"acc_norm_stderr,none": 0.04781644043179842,
						"acc_stderr,none": 0.05560271295416639,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3528125,
						"acc_stderr,none": 0.01616927548412904,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.015195720118175115,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.338,
						"acc_stderr,none": 0.014965960710224472,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.35833333333333334,
						"acc_stderr,none": 0.013848054140053426,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.36860068259385664,
						"acc_norm,none": 0.39078498293515357,
						"acc_norm_stderr,none": 0.014258563880513778,
						"acc_stderr,none": 0.014097810678042189,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7192760942760943,
						"acc_norm,none": 0.6898148148148148,
						"acc_norm_stderr,none": 0.009491721291998514,
						"acc_stderr,none": 0.009220526174711361,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.02535,
						"acc_stderr,none": 0.03228414584237135,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0095,
						"acc_stderr,none": 0.002169614853910027,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.071,
						"acc_stderr,none": 0.005744214306500109,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.037,
						"acc_stderr,none": 0.004221896754552657,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.12,
						"acc_stderr,none": 0.007268178121551635,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.0035,
						"acc_stderr,none": 0.0013208888574315673,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.011,
						"acc_stderr,none": 0.002332856855993376,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.0015,
						"acc_stderr,none": 0.0008655920660521431,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.00824295010845987,
						"acc_stderr,none": 0.0018836610014054645,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.8199253731343283,
						"acc_stderr,none": 0.16134993865689873,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.918,
						"acc_stderr,none": 0.008680515615523712,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.99,
						"acc_stderr,none": 0.0031480009386767654,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.997,
						"acc_stderr,none": 0.0017303161543469343,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598283,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.852,
						"acc_stderr,none": 0.01123486636423526,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.744,
						"acc_stderr,none": 0.013807775152234183,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.617,
						"acc_stderr,none": 0.015380102325652706,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.701,
						"acc_stderr,none": 0.014484778521220463,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.011043457699378239,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.994,
						"acc_stderr,none": 0.002443352199329838,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.969,
						"acc_stderr,none": 0.005483527064679196,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.942,
						"acc_stderr,none": 0.007395315455792938,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.946,
						"acc_stderr,none": 0.007150883521295437,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.932,
						"acc_stderr,none": 0.007964887911291605,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.921,
						"acc_stderr,none": 0.008534156773333435,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.891,
						"acc_stderr,none": 0.009859828407037186,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.0052195060344100465,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.881,
						"acc_stderr,none": 0.010244215145336664,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.755,
						"acc_stderr,none": 0.013607356839598118,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499342,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.804,
						"acc_stderr,none": 0.012559527926707368,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.94,
						"acc_stderr,none": 0.007513751157474907,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.845,
						"acc_stderr,none": 0.011450157470799475,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.983,
						"acc_stderr,none": 0.0040899544896891,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.226,
						"acc_stderr,none": 0.013232501619085343,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.875,
						"acc_stderr,none": 0.010463483381956722,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.808,
						"acc_stderr,none": 0.012461592646659992,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.709,
						"acc_stderr,none": 0.014370995982377937,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.809,
						"acc_stderr,none": 0.01243678711217949,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.0058145342727349714,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.868,
						"acc_stderr,none": 0.010709373963528014,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.008333333333333349,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.884,
						"acc_stderr,none": 0.010131468138756978,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.437,
						"acc_stderr,none": 0.015693223928730377,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.831,
						"acc_stderr,none": 0.011856625977890108,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.585,
						"acc_stderr,none": 0.015589035185604628,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.599,
						"acc_stderr,none": 0.015506109745498332,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612028,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.805,
						"acc_stderr,none": 0.012535235623319329,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.788,
						"acc_stderr,none": 0.012931481864938033,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946097,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.889,
						"acc_stderr,none": 0.009938701010583726,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.802,
						"acc_stderr,none": 0.012607733934175306,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.947,
						"acc_stderr,none": 0.007088105617246439,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.998,
						"acc_stderr,none": 0.001413505570557816,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.892,
						"acc_stderr,none": 0.0098200016513457,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.0136816002787023,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.366,
						"acc_stderr,none": 0.015240612726405754,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.93,
						"acc_stderr,none": 0.00807249435832351,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.9,
						"acc_stderr,none": 0.009491579957525042,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.992,
						"acc_stderr,none": 0.0028185003005045044,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.751,
						"acc_stderr,none": 0.013681600278702324,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.466,
						"acc_stderr,none": 0.015782683329937625,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.911,
						"acc_stderr,none": 0.009008893392651528,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.962,
						"acc_stderr,none": 0.006049181150584939,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.667,
						"acc_stderr,none": 0.014910846164229871,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.011075814808567038,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.87,
						"acc_stderr,none": 0.010640169792499366,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.857,
						"acc_stderr,none": 0.01107581480856704,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.849,
						"acc_stderr,none": 0.011328165223341674,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.933,
						"acc_stderr,none": 0.007910345983177547,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248791,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.976,
						"acc_stderr,none": 0.004842256441727078,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.974,
						"acc_stderr,none": 0.0050348137353182255,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.402,
						"acc_stderr,none": 0.015512467135715071,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.014632638658632893,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.6987767584097859,
						"acc_stderr,none": 0.00802427870499393,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.14285714285714285,
						"acc_stderr,none": 0.04718416136255829,
						"alias": "cb",
						"f1,none": 0.13680964395850856,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.2451708766716196,
						"acc_norm,none": 0.2451708766716196,
						"acc_norm_stderr,none": 0.11809812467048535,
						"acc_stderr,none": 0.11809812467048535,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.06520506636966263,
						"acc_stderr,none": 0.06520506636966263,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.07872958216222171,
						"acc_stderr,none": 0.07872958216222171,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.2127659574468085,
						"acc_norm,none": 0.2127659574468085,
						"acc_norm_stderr,none": 0.060342609647735204,
						"acc_stderr,none": 0.060342609647735204,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.21818181818181817,
						"acc_norm,none": 0.21818181818181817,
						"acc_norm_stderr,none": 0.05620374845754972,
						"acc_stderr,none": 0.05620374845754972,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.21621621621621623,
						"acc_norm,none": 0.21621621621621623,
						"acc_norm_stderr,none": 0.0686105685212965,
						"acc_stderr,none": 0.0686105685212965,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.3125,
						"acc_norm,none": 0.3125,
						"acc_norm_stderr,none": 0.11967838846954226,
						"acc_stderr,none": 0.11967838846954226,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.07138609234576078,
						"acc_stderr,none": 0.07138609234576078,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.1935483870967742,
						"acc_norm,none": 0.1935483870967742,
						"acc_norm_stderr,none": 0.07213122508063838,
						"acc_stderr,none": 0.07213122508063838,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.22580645161290322,
						"acc_norm,none": 0.22580645161290322,
						"acc_norm_stderr,none": 0.07633651333031764,
						"acc_stderr,none": 0.07633651333031764,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.1094243309804831,
						"acc_stderr,none": 0.1094243309804831,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.10083169033033672,
						"acc_stderr,none": 0.10083169033033672,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.391304347826087,
						"acc_norm,none": 0.391304347826087,
						"acc_norm_stderr,none": 0.10405096111532161,
						"acc_stderr,none": 0.10405096111532161,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.13636363636363635,
						"acc_norm,none": 0.13636363636363635,
						"acc_norm_stderr,none": 0.0748867700952649,
						"acc_stderr,none": 0.0748867700952649,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.4166666666666667,
						"acc_norm,none": 0.4166666666666667,
						"acc_norm_stderr,none": 0.10279899245732686,
						"acc_stderr,none": 0.10279899245732686,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.08780518530755133,
						"acc_stderr,none": 0.08780518530755133,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09933992677987828,
						"acc_stderr,none": 0.09933992677987828,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333333,
						"acc_stderr,none": 0.08333333333333333,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.06273323266748675,
						"acc_stderr,none": 0.06273323266748675,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.42857142857142855,
						"acc_norm,none": 0.42857142857142855,
						"acc_norm_stderr,none": 0.11065666703449763,
						"acc_stderr,none": 0.11065666703449763,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.07180198468215396,
						"acc_stderr,none": 0.07180198468215396,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.22727272727272727,
						"acc_norm,none": 0.22727272727272727,
						"acc_norm_stderr,none": 0.09144861547306321,
						"acc_stderr,none": 0.09144861547306321,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857373,
						"acc_stderr,none": 0.08982552969857373,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0679170334216026,
						"acc_stderr,none": 0.0679170334216026,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.13043478260869565,
						"acc_norm,none": 0.13043478260869565,
						"acc_norm_stderr,none": 0.05020437123388052,
						"acc_stderr,none": 0.05020437123388052,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520547,
						"acc_stderr,none": 0.08793911249520547,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.25280607839751335,
						"acc_norm,none": 0.25280607839751335,
						"acc_norm_stderr,none": 0.04104649589180001,
						"acc_stderr,none": 0.04104649589180001,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.03571428571428571,
						"acc_stderr,none": 0.03571428571428571,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2621951219512195,
						"acc_norm,none": 0.2621951219512195,
						"acc_norm_stderr,none": 0.03445000289173461,
						"acc_stderr,none": 0.03445000289173461,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.035410885580708956,
						"acc_stderr,none": 0.035410885580708956,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.22424242424242424,
						"acc_norm,none": 0.22424242424242424,
						"acc_norm_stderr,none": 0.032568666616811015,
						"acc_stderr,none": 0.032568666616811015,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2535885167464115,
						"acc_norm,none": 0.2535885167464115,
						"acc_norm_stderr,none": 0.030166316298847997,
						"acc_stderr,none": 0.030166316298847997,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.03541088558070894,
						"acc_stderr,none": 0.03541088558070894,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2900763358778626,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.03980066246467766,
						"acc_stderr,none": 0.03980066246467766,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.23529411764705882,
						"acc_norm,none": 0.23529411764705882,
						"acc_norm_stderr,none": 0.03650781710789269,
						"acc_stderr,none": 0.03650781710789269,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2897196261682243,
						"acc_norm,none": 0.2897196261682243,
						"acc_norm_stderr,none": 0.0440606533474851,
						"acc_stderr,none": 0.0440606533474851,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.25696594427244585,
						"acc_norm,none": 0.25696594427244585,
						"acc_norm_stderr,none": 0.02435085467633012,
						"acc_stderr,none": 0.02435085467633012,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.28921568627450983,
						"acc_norm,none": 0.28921568627450983,
						"acc_norm_stderr,none": 0.03182231867647553,
						"acc_stderr,none": 0.03182231867647553,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.21787709497206703,
						"acc_norm,none": 0.21787709497206703,
						"acc_norm_stderr,none": 0.030940924724402182,
						"acc_stderr,none": 0.030940924724402182,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.2109704641350211,
						"acc_norm,none": 0.2109704641350211,
						"acc_norm_stderr,none": 0.02655837250266192,
						"acc_stderr,none": 0.02655837250266192,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800375,
						"acc_stderr,none": 0.04142972007800375,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.308411214953271,
						"acc_norm,none": 0.308411214953271,
						"acc_norm_stderr,none": 0.04485760883316698,
						"acc_stderr,none": 0.04485760883316698,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.27358490566037735,
						"acc_norm,none": 0.27358490566037735,
						"acc_norm_stderr,none": 0.043505468189990605,
						"acc_stderr,none": 0.043505468189990605,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.1388888888888889,
						"acc_norm,none": 0.1388888888888889,
						"acc_norm_stderr,none": 0.033432700628696195,
						"acc_stderr,none": 0.033432700628696195,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.21904761904761905,
						"acc_norm,none": 0.21904761904761905,
						"acc_norm_stderr,none": 0.040556911537178254,
						"acc_stderr,none": 0.040556911537178254,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153370994,
						"acc_stderr,none": 0.040842473153370994,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2783882783882784,
						"acc_norm,none": 0.2783882783882784,
						"acc_norm_stderr,none": 0.027176455318754136,
						"acc_stderr,none": 0.027176455318754136,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2696078431372549,
						"acc_norm,none": 0.2696078431372549,
						"acc_norm_stderr,none": 0.03114557065948678,
						"acc_stderr,none": 0.03114557065948678,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.23976608187134502,
						"acc_norm,none": 0.23976608187134502,
						"acc_norm_stderr,none": 0.03274485211946956,
						"acc_stderr,none": 0.03274485211946956,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.272108843537415,
						"acc_norm,none": 0.272108843537415,
						"acc_norm_stderr,none": 0.036832239154550236,
						"acc_stderr,none": 0.036832239154550236,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.2446043165467626,
						"acc_norm,none": 0.2446043165467626,
						"acc_norm_stderr,none": 0.036591462225205665,
						"acc_stderr,none": 0.036591462225205665,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.03329493246449382,
						"acc_stderr,none": 0.03329493246449382,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2392638036809816,
						"acc_norm,none": 0.2392638036809816,
						"acc_norm_stderr,none": 0.0335195387952127,
						"acc_stderr,none": 0.0335195387952127,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.033113308926626096,
						"acc_stderr,none": 0.033113308926626096,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.28174603174603174,
						"acc_norm,none": 0.28174603174603174,
						"acc_norm_stderr,none": 0.028394293050790515,
						"acc_stderr,none": 0.028394293050790515,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.23232323232323232,
						"acc_norm,none": 0.23232323232323232,
						"acc_norm_stderr,none": 0.030088629490217487,
						"acc_stderr,none": 0.030088629490217487,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.29411764705882354,
						"acc_norm,none": 0.29411764705882354,
						"acc_norm_stderr,none": 0.0295973297309781,
						"acc_stderr,none": 0.0295973297309781,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.02725685083881996,
						"acc_stderr,none": 0.02725685083881996,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.035356812290532405,
						"acc_stderr,none": 0.035356812290532405,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.24431818181818182,
						"acc_norm,none": 0.24431818181818182,
						"acc_norm_stderr,none": 0.03248092256353737,
						"acc_stderr,none": 0.03248092256353737,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2080536912751678,
						"acc_norm,none": 0.2080536912751678,
						"acc_norm_stderr,none": 0.03336604448346549,
						"acc_stderr,none": 0.03336604448346549,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.26627218934911245,
						"acc_norm,none": 0.26627218934911245,
						"acc_norm_stderr,none": 0.03410167836676976,
						"acc_stderr,none": 0.03410167836676976,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.037832495422898876,
						"acc_stderr,none": 0.037832495422898876,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2288135593220339,
						"acc_norm,none": 0.2288135593220339,
						"acc_norm_stderr,none": 0.0388353872453885,
						"acc_stderr,none": 0.0388353872453885,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.2804878048780488,
						"acc_norm,none": 0.2804878048780488,
						"acc_norm_stderr,none": 0.03518700228801578,
						"acc_stderr,none": 0.03518700228801578,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.23636363636363636,
						"acc_norm,none": 0.23636363636363636,
						"acc_norm_stderr,none": 0.04069306319721376,
						"acc_stderr,none": 0.04069306319721376,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.24475524475524477,
						"acc_norm,none": 0.24475524475524477,
						"acc_norm_stderr,none": 0.036079930330813775,
						"acc_stderr,none": 0.036079930330813775,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2619047619047619,
						"acc_norm,none": 0.2619047619047619,
						"acc_norm_stderr,none": 0.03932537680392871,
						"acc_stderr,none": 0.03932537680392871,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.2810810810810811,
						"acc_norm,none": 0.2810810810810811,
						"acc_norm_stderr,none": 0.03313956873549873,
						"acc_stderr,none": 0.03313956873549873,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.21511627906976744,
						"acc_norm,none": 0.21511627906976744,
						"acc_norm_stderr,none": 0.03142253684735939,
						"acc_stderr,none": 0.03142253684735939,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25060827250608275,
						"acc_norm,none": 0.25060827250608275,
						"acc_norm_stderr,none": 0.021402288814095338,
						"acc_stderr,none": 0.021402288814095338,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2757009345794392,
						"acc_norm,none": 0.2757009345794392,
						"acc_norm_stderr,none": 0.030618808026055613,
						"acc_stderr,none": 0.030618808026055613,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2601626016260163,
						"acc_norm,none": 0.2601626016260163,
						"acc_norm_stderr,none": 0.03972012975450537,
						"acc_stderr,none": 0.03972012975450537,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.29508196721311475,
						"acc_norm,none": 0.29508196721311475,
						"acc_norm_stderr,none": 0.041461781649012125,
						"acc_stderr,none": 0.041461781649012125,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2714285714285714,
						"acc_norm,none": 0.2714285714285714,
						"acc_norm_stderr,none": 0.03076030982422605,
						"acc_stderr,none": 0.03076030982422605,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03305282343736877,
						"acc_stderr,none": 0.03305282343736877,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.24867724867724866,
						"acc_norm,none": 0.24867724867724866,
						"acc_norm_stderr,none": 0.03152480234871162,
						"acc_stderr,none": 0.03152480234871162,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.23275862068965517,
						"acc_norm,none": 0.23275862068965517,
						"acc_norm_stderr,none": 0.039406691683377,
						"acc_stderr,none": 0.039406691683377,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.2689655172413793,
						"acc_norm,none": 0.2689655172413793,
						"acc_norm_stderr,none": 0.03695183311650232,
						"acc_stderr,none": 0.03695183311650232,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.24761904761904763,
						"acc_norm,none": 0.24761904761904763,
						"acc_norm_stderr,none": 0.04232473532055043,
						"acc_stderr,none": 0.04232473532055043,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.03424737867752743,
						"acc_stderr,none": 0.03424737867752743,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27014218009478674,
						"acc_norm,none": 0.27014218009478674,
						"acc_norm_stderr,none": 0.030641194076293145,
						"acc_stderr,none": 0.030641194076293145,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.2579787234042553,
						"acc_norm,none": 0.2579787234042553,
						"acc_norm_stderr,none": 0.02259355080105626,
						"acc_stderr,none": 0.02259355080105626,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.24568965517241378,
						"acc_norm,none": 0.24568965517241378,
						"acc_norm_stderr,none": 0.028324514684171135,
						"acc_stderr,none": 0.028324514684171135,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.03329115112144781,
						"acc_stderr,none": 0.03329115112144781,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.2610619469026549,
						"acc_norm,none": 0.2610619469026549,
						"acc_norm_stderr,none": 0.029280908211631717,
						"acc_stderr,none": 0.029280908211631717,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.23030303030303031,
						"acc_norm,none": 0.23030303030303031,
						"acc_norm_stderr,none": 0.032876667586034886,
						"acc_stderr,none": 0.032876667586034886,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2648648648648649,
						"acc_norm,none": 0.2648648648648649,
						"acc_norm_stderr,none": 0.03253020905593336,
						"acc_stderr,none": 0.03253020905593336,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.1952662721893491,
						"acc_norm,none": 0.1952662721893491,
						"acc_norm_stderr,none": 0.0305833516739231,
						"acc_stderr,none": 0.0305833516739231,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2484472049689441,
						"acc_norm,none": 0.2484472049689441,
						"acc_norm_stderr,none": 0.03416149068322981,
						"acc_stderr,none": 0.03416149068322981,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.275,
						"acc_norm,none": 0.275,
						"acc_norm_stderr,none": 0.035410885580708956,
						"acc_stderr,none": 0.035410885580708956,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.010223886506275306,
						"mcc_stderr,none": 0.031050885644258095
					},
					"copa": {
						"acc,none": 0.84,
						"acc_stderr,none": 0.036845294917747115,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.643317680381634,
						"likelihood_diff_stderr,none": 0.43363916971201094,
						"pct_stereotype,none": 0.5854203935599285,
						"pct_stereotype_stderr,none": 0.074218934797104
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.627906976744186,
						"likelihood_diff_stderr,none": 0.08551533329199569,
						"pct_stereotype,none": 0.6356589147286822,
						"pct_stereotype_stderr,none": 0.011755176051187694
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.8846153846153846,
						"likelihood_diff_stderr,none": 0.3766813688884809,
						"pct_stereotype,none": 0.7252747252747253,
						"pct_stereotype_stderr,none": 0.047052133987784364
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.636363636363637,
						"likelihood_diff_stderr,none": 1.7107161035007707,
						"pct_stereotype,none": 0.7272727272727273,
						"pct_stereotype_stderr,none": 0.14083575804390605
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.323076923076923,
						"likelihood_diff_stderr,none": 0.5802934404859371,
						"pct_stereotype,none": 0.676923076923077,
						"pct_stereotype_stderr,none": 0.05845647751373333
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.6,
						"likelihood_diff_stderr,none": 0.1551120395326995,
						"pct_stereotype,none": 0.634375,
						"pct_stereotype_stderr,none": 0.026964702306061943
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.560185185185185,
						"likelihood_diff_stderr,none": 0.21025752700281436,
						"pct_stereotype,none": 0.5879629629629629,
						"pct_stereotype_stderr,none": 0.03356787758160831
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 3.8784722222222223,
						"likelihood_diff_stderr,none": 0.39753596798532265,
						"pct_stereotype,none": 0.75,
						"pct_stereotype_stderr,none": 0.051389153237064875
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.5184547244094486,
						"likelihood_diff_stderr,none": 0.1520038319997533,
						"pct_stereotype,none": 0.5492125984251969,
						"pct_stereotype_stderr,none": 0.02209795835867595
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.4031531531531534,
						"likelihood_diff_stderr,none": 0.3115215422495467,
						"pct_stereotype,none": 0.6936936936936937,
						"pct_stereotype_stderr,none": 0.04395066997351522
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.545698924731183,
						"likelihood_diff_stderr,none": 0.4487560763697384,
						"pct_stereotype,none": 0.8279569892473119,
						"pct_stereotype_stderr,none": 0.039348528120618655
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.261184210526316,
						"likelihood_diff_stderr,none": 0.26477789531978657,
						"pct_stereotype,none": 0.6736842105263158,
						"pct_stereotype_stderr,none": 0.0341048643533449
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.6536225402504474,
						"likelihood_diff_stderr,none": 0.0856349942031604,
						"pct_stereotype,none": 0.5360763267740012,
						"pct_stereotype_stderr,none": 0.012181466483312614
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.2916666666666665,
						"likelihood_diff_stderr,none": 0.28640246731373625,
						"pct_stereotype,none": 0.4666666666666667,
						"pct_stereotype_stderr,none": 0.05288198530254015
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.288461538461538,
						"likelihood_diff_stderr,none": 1.8131566347804926,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.462121212121212,
						"likelihood_diff_stderr,none": 0.5263799276304062,
						"pct_stereotype,none": 0.6363636363636364,
						"pct_stereotype_stderr,none": 0.059666374846717586
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.3006230529595015,
						"likelihood_diff_stderr,none": 0.17764680031972918,
						"pct_stereotype,none": 0.5264797507788161,
						"pct_stereotype_stderr,none": 0.027911625198936637
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.8873517786561265,
						"likelihood_diff_stderr,none": 0.2143543004081145,
						"pct_stereotype,none": 0.383399209486166,
						"pct_stereotype_stderr,none": 0.030628616122857777
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.329861111111111,
						"likelihood_diff_stderr,none": 0.3963295749325047,
						"pct_stereotype,none": 0.5694444444444444,
						"pct_stereotype_stderr,none": 0.05876396677084613
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.414945652173913,
						"likelihood_diff_stderr,none": 0.15942550244245618,
						"pct_stereotype,none": 0.4891304347826087,
						"pct_stereotype_stderr,none": 0.023332486098156545
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4478260869565216,
						"likelihood_diff_stderr,none": 0.3370760841985869,
						"pct_stereotype,none": 0.6434782608695652,
						"pct_stereotype_stderr,none": 0.04485981954131494
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 4.2760989010989015,
						"likelihood_diff_stderr,none": 0.34741609261138007,
						"pct_stereotype,none": 0.8131868131868132,
						"pct_stereotype_stderr,none": 0.04108446855035881
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.332908163265306,
						"likelihood_diff_stderr,none": 0.29235484675191176,
						"pct_stereotype,none": 0.6632653061224489,
						"pct_stereotype_stderr,none": 0.03384311010566736
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.07283464566929133,
						"exact_match_stderr,none": 0.0057662390329137805
					},
					"glue": {
						"acc,none": 0.47479626778111467,
						"acc_stderr,none": 0.07710068131458633,
						"alias": "glue",
						"f1,none": 0.28579319078788673,
						"f1_stderr,none": 0.0019606327113536547,
						"mcc,none": 0.010223886506275306,
						"mcc_stderr,none": 0.0009641574992927934
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.039423805913570885,
						"exact_match_stderr,get-answer": 0.005360280030342446
					},
					"hellaswag": {
						"acc,none": 0.5246962756423024,
						"acc_norm,none": 0.7012547301334395,
						"acc_norm_stderr,none": 0.004567724872057188,
						"acc_stderr,none": 0.004983691099110912,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.15114062951198382,
						"acc_norm,none": 0.15114062951198382,
						"acc_norm_stderr,none": 0.04219089042469381,
						"acc_stderr,none": 0.04219089042469381,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.116,
						"acc_norm,none": 0.116,
						"acc_norm_stderr,none": 0.010131468138757004,
						"acc_stderr,none": 0.010131468138757004,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.127,
						"acc_norm,none": 0.127,
						"acc_norm_stderr,none": 0.01053479862085574,
						"acc_stderr,none": 0.01053479862085574,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.225,
						"acc_norm,none": 0.225,
						"acc_norm_stderr,none": 0.013211720158614751,
						"acc_stderr,none": 0.013211720158614751,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096916,
						"acc_stderr,none": 0.012841374572096916,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18,
						"acc_norm,none": 0.18,
						"acc_norm_stderr,none": 0.015697473824603854,
						"acc_stderr,none": 0.015697473824603854,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.083,
						"acc_norm,none": 0.083,
						"acc_norm_stderr,none": 0.008728527206074787,
						"acc_stderr,none": 0.008728527206074787,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.128,
						"acc_norm,none": 0.128,
						"acc_norm_stderr,none": 0.010570133761108665,
						"acc_stderr,none": 0.010570133761108665,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.09,
						"acc_norm,none": 0.09,
						"acc_norm_stderr,none": 0.009054390204866435,
						"acc_stderr,none": 0.009054390204866435,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.008963053962592076,
						"acc_stderr,none": 0.008963053962592076,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.3076923076923077,
						"acc_norm,none": 0.3076923076923077,
						"acc_norm_stderr,none": 0.04063619567656727,
						"acc_stderr,none": 0.04063619567656727,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.042295258468165065,
						"acc_stderr,none": 0.042295258468165065,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475294,
						"acc_stderr,none": 0.011358918303475294,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528024,
						"acc_stderr,none": 0.010709373963528024,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096921,
						"acc_stderr,none": 0.012841374572096921,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.084,
						"acc_norm,none": 0.084,
						"acc_norm_stderr,none": 0.008776162089491116,
						"acc_stderr,none": 0.008776162089491116,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.149,
						"acc_norm,none": 0.149,
						"acc_norm_stderr,none": 0.01126614068463216,
						"acc_stderr,none": 0.01126614068463216,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.138,
						"acc_norm,none": 0.138,
						"acc_norm_stderr,none": 0.010912152632504392,
						"acc_stderr,none": 0.010912152632504392,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.164,
						"acc_norm,none": 0.164,
						"acc_norm_stderr,none": 0.01171500069318131,
						"acc_stderr,none": 0.01171500069318131,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.153,
						"acc_norm,none": 0.153,
						"acc_norm_stderr,none": 0.011389500459665547,
						"acc_stderr,none": 0.011389500459665547,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.04093601807403326,
						"acc_stderr,none": 0.04093601807403326,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.108,
						"acc_norm,none": 0.108,
						"acc_norm_stderr,none": 0.009820001651345691,
						"acc_stderr,none": 0.009820001651345691,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.134,
						"acc_norm,none": 0.134,
						"acc_norm_stderr,none": 0.010777762298369674,
						"acc_stderr,none": 0.010777762298369674,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.098,
						"acc_norm,none": 0.098,
						"acc_norm_stderr,none": 0.009406619184621226,
						"acc_stderr,none": 0.009406619184621226,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.237,
						"acc_norm,none": 0.237,
						"acc_norm_stderr,none": 0.013454070462577954,
						"acc_stderr,none": 0.013454070462577954,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.124,
						"acc_norm,none": 0.124,
						"acc_norm_stderr,none": 0.010427498872343963,
						"acc_stderr,none": 0.010427498872343963,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.012655439943366665,
						"acc_stderr,none": 0.012655439943366665,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.015865408450741195,
						"acc_stderr,none": 0.015865408450741195,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.116,
						"acc_norm,none": 0.116,
						"acc_norm_stderr,none": 0.010131468138756995,
						"acc_stderr,none": 0.010131468138756995,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.154,
						"acc_norm,none": 0.154,
						"acc_norm_stderr,none": 0.011419913065098687,
						"acc_stderr,none": 0.011419913065098687,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.135,
						"acc_norm,none": 0.135,
						"acc_norm_stderr,none": 0.010811655372416054,
						"acc_stderr,none": 0.010811655372416054,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.152,
						"acc_norm,none": 0.152,
						"acc_norm_stderr,none": 0.011358918303475287,
						"acc_stderr,none": 0.011358918303475287,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.21666666666666667,
						"acc_norm,none": 0.21666666666666667,
						"acc_norm_stderr,none": 0.02382504669967184,
						"acc_stderr,none": 0.02382504669967184,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696235,
						"acc_stderr,none": 0.013127502859696235,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847169,
						"acc_stderr,none": 0.009779910359847169,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.168,
						"acc_norm,none": 0.168,
						"acc_norm_stderr,none": 0.01182860583145425,
						"acc_stderr,none": 0.01182860583145425,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.185,
						"acc_norm,none": 0.185,
						"acc_norm_stderr,none": 0.027525684670556556,
						"acc_stderr,none": 0.027525684670556556,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662165,
						"acc_stderr,none": 0.012207580637662165,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662146,
						"acc_stderr,none": 0.012207580637662146,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.21,
						"acc_norm,none": 0.21,
						"acc_norm_stderr,none": 0.028873315391699354,
						"acc_stderr,none": 0.028873315391699354,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.136,
						"acc_norm,none": 0.136,
						"acc_norm_stderr,none": 0.01084535023047299,
						"acc_stderr,none": 0.01084535023047299,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4819118614338961,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.0004932985971943903,
						"acc_stderr,none": 0.04776314369424466,
						"alias": "kobest",
						"f1,none": 0.3846770639557548,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5028490028490028,
						"acc_stderr,none": 0.013348550797680823,
						"alias": " - kobest_boolq",
						"f1,none": 0.3371320037986705,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.501,
						"acc_stderr,none": 0.01581926829057682,
						"alias": " - kobest_copa",
						"f1,none": 0.5000756398580977,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.328,
						"acc_norm,none": 0.438,
						"acc_norm_stderr,none": 0.022210326363977417,
						"acc_stderr,none": 0.021017027165175495,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.32216295826341423,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.5340050377833753,
						"acc_stderr,none": 0.025067769630661912,
						"alias": " - kobest_sentineg",
						"f1,none": 0.5207597893679079,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6549582767320008,
						"acc_stderr,none": 0.02448321431391065,
						"alias": "lambada",
						"perplexity,none": 4.919983099161434,
						"perplexity_stderr,none": 0.47080234373481183
					},
					"lambada_cloze": {
						"acc,none": 0.03793906462254997,
						"acc_stderr,none": 0.006092491808066035,
						"alias": "lambada_cloze",
						"perplexity,none": 302.74656013590857,
						"perplexity_stderr,none": 36.1742705293389
					},
					"lambada_multilingual": {
						"acc,none": 0.43578497962352025,
						"acc_stderr,none": 0.07783182786033929,
						"alias": "lambada_multilingual",
						"perplexity,none": 55.75634065211411,
						"perplexity_stderr,none": 17.112832544800874
					},
					"lambada_openai": {
						"acc,none": 0.7023093343683291,
						"acc_stderr,none": 0.006370285573012025,
						"alias": " - lambada_openai",
						"perplexity,none": 4.004151656494574,
						"perplexity_stderr,none": 0.0853963706849589
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.026974577915777218,
						"acc_stderr,none": 0.0022571036096265327,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 233.16582386462363,
						"perplexity_stderr,none": 7.59103270375321
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.318067145352222,
						"acc_stderr,none": 0.006488469772173893,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 87.03760418597274,
						"perplexity_stderr,none": 5.220743099786189
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.7003687172520862,
						"acc_stderr,none": 0.006382179569794074,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.00575099472505,
						"perplexity_stderr,none": 0.08559465754530345
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.3483407723656123,
						"acc_stderr,none": 0.006637805195772818,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 74.50603551865778,
						"perplexity_stderr,none": 4.146635362251485
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.42227828449446925,
						"acc_stderr,none": 0.006881304773376873,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 47.60819762333609,
						"perplexity_stderr,none": 2.6897251543883476
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.3898699786532117,
						"acc_stderr,none": 0.006794901529888746,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 65.62411493787893,
						"perplexity_stderr,none": 3.9555857520848434
					},
					"lambada_standard": {
						"acc,none": 0.6078012808072967,
						"acc_stderr,none": 0.006802146227117816,
						"alias": " - lambada_standard",
						"perplexity,none": 5.833042926471127,
						"perplexity_stderr,none": 0.13814813398814993
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.048903551329322725,
						"acc_stderr,none": 0.003004654580034688,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 372.3272964071935,
						"perplexity_stderr,none": 11.77185708590048
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.2595419847328244,
						"exact_match_stderr,get-answer": 0.011060275310259944
					},
					"logiqa": {
						"acc,none": 0.22887864823348694,
						"acc_norm,none": 0.26881720430107525,
						"acc_norm_stderr,none": 0.017389409463712625,
						"acc_stderr,none": 0.016478107276313273,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2430025445292621,
						"acc_norm,none": 0.2900763358778626,
						"acc_norm_stderr,none": 0.011449166849225307,
						"acc_stderr,none": 0.010820928512725118,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2623115577889447,
						"acc_norm,none": 0.2549413735343384,
						"acc_norm_stderr,none": 0.007978403103631434,
						"acc_stderr,none": 0.008052779240636127,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3746028383816988,
						"acc_stderr,none": 0.004981435208169745,
						"alias": "mc_taco",
						"f1,none": 0.5140317669327628,
						"f1_stderr,none": 0.005529744729456079
					},
					"medmcqa": {
						"acc,none": 0.2832895051398518,
						"acc_norm,none": 0.2832895051398518,
						"acc_norm_stderr,none": 0.006967790922104441,
						"acc_stderr,none": 0.006967790922104441,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.2623723487824038,
						"acc_norm,none": 0.2623723487824038,
						"acc_norm_stderr,none": 0.012334855614561548,
						"acc_stderr,none": 0.012334855614561548,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.2617860703603475,
						"acc_stderr,none": 0.03633266968838108,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.3037037037037037,
						"acc_stderr,none": 0.03972552884785137,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.29605263157894735,
						"acc_stderr,none": 0.03715062154998904,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.0440844002276808,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.2490566037735849,
						"acc_stderr,none": 0.026616482980501715,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2708333333333333,
						"acc_stderr,none": 0.03716177437566017,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.22,
						"acc_stderr,none": 0.04163331998932269,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.2774566473988439,
						"acc_stderr,none": 0.034140140070440354,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.22549019607843138,
						"acc_stderr,none": 0.041583075330832865,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.23829787234042554,
						"acc_stderr,none": 0.02785125297388978,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.21929824561403508,
						"acc_stderr,none": 0.03892431106518754,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2413793103448276,
						"acc_stderr,none": 0.03565998174135303,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.25132275132275134,
						"acc_stderr,none": 0.022340482339643895,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.04006168083848877,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.23870967741935484,
						"acc_stderr,none": 0.024251071262208834,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.03031509928561773,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.0347769116216366,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.25757575757575757,
						"acc_stderr,none": 0.03115626951964684,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.26424870466321243,
						"acc_stderr,none": 0.03182155050916647,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.26666666666666666,
						"acc_stderr,none": 0.022421273612923714,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.02671924078371216,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.29831932773109243,
						"acc_stderr,none": 0.02971914287634286,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.271523178807947,
						"acc_stderr,none": 0.036313298039696525,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.24587155963302754,
						"acc_stderr,none": 0.01846194096870844,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.30092592592592593,
						"acc_stderr,none": 0.031280390843298804,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.28921568627450983,
						"acc_stderr,none": 0.03182231867647553,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.27848101265822783,
						"acc_stderr,none": 0.02917868230484256,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.22869955156950672,
						"acc_stderr,none": 0.028188240046929196,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.21374045801526717,
						"acc_stderr,none": 0.0359546161177469,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.2680127523910733,
						"acc_stderr,none": 0.028488960342705927,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04065578140908705,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3055555555555556,
						"acc_stderr,none": 0.04453197507374983,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.27607361963190186,
						"acc_stderr,none": 0.03512385283705051,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.19642857142857142,
						"acc_stderr,none": 0.03770970049347018,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.1650485436893204,
						"acc_stderr,none": 0.036756688322331886,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.3076923076923077,
						"acc_stderr,none": 0.0302363899421731,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.2848020434227331,
						"acc_stderr,none": 0.016139174096522567,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.2947976878612717,
						"acc_stderr,none": 0.02454761779480383,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.23575418994413408,
						"acc_stderr,none": 0.014196375686290804,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.26143790849673204,
						"acc_stderr,none": 0.025160998214292456,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.25651754103636953,
						"acc_stderr,none": 0.04113627785751167,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2508038585209003,
						"acc_stderr,none": 0.024619771956697168,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.2993827160493827,
						"acc_stderr,none": 0.02548311560119547,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.2695035460992908,
						"acc_stderr,none": 0.026469036818590627,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2653194263363755,
						"acc_stderr,none": 0.011276198843958878,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.14705882352941177,
						"acc_stderr,none": 0.021513964052859623,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.01751781884501444,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.04265792110940589,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.24489795918367346,
						"acc_stderr,none": 0.02752963744017493,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2557686057848554,
						"acc_stderr,none": 0.03273477855480962,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.23880597014925373,
						"acc_stderr,none": 0.03014777593540922,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.263558515699334,
						"acc_stderr,none": 0.04364865601901659,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.046882617226215034,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.25903614457831325,
						"acc_stderr,none": 0.03410646614071857,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.29239766081871343,
						"acc_stderr,none": 0.03488647713457921,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.33601630157921547,
						"acc_stderr,none": 0.0047679957036028415,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.32699349064279903,
						"acc_stderr,none": 0.004731298382913875,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6053921568627451,
						"acc_stderr,none": 0.024227245879965408,
						"alias": "mrpc",
						"f1,none": 0.7190226876090751,
						"f1_stderr,none": 0.02133921893898719
					},
					"multimedqa": {
						"acc,none": 0.30731014904187365,
						"acc_norm,none": 0.28086754518989626,
						"acc_norm_stderr,none": 9.552328148792985e-05,
						"acc_stderr,none": 0.10566243428903258,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5563118811881188,
						"acc_stderr,none": 0.007136110353643632,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6900865328634834,
						"mrr_stderr,none": 0.010302666994788365,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.43905191873589167,
						"r@2_stderr,none": 0.016681981598282936
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6489842004283556,
						"mrr_stderr,none": 0.010411729734569615,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.46952595936794583,
						"r@2_stderr,none": 0.01677607028749662
					},
					"openbookqa": {
						"acc,none": 0.292,
						"acc_norm,none": 0.406,
						"acc_norm_stderr,none": 0.021983962090086333,
						"acc_stderr,none": 0.02035437548053008,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4565,
						"acc_stderr,none": 0.011140733053371408,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.445,
						"acc_stderr,none": 0.011115272135099207,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.412,
						"acc_stderr,none": 0.011008569130325172,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.531,
						"acc_stderr,none": 0.011161621338114474,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.4825,
						"acc_stderr,none": 0.01117628425125418,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.011170245619215438,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5305,
						"acc_stderr,none": 0.011162310405413182,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4830714285714286,
						"acc_stderr,none": 0.0307277164128183,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7709466811751904,
						"acc_norm,none": 0.7747551686615887,
						"acc_norm_stderr,none": 0.009746643471032154,
						"acc_stderr,none": 0.009804509865175504,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.2243808710503843,
						"acc_norm,none": 0.2752455166524338,
						"acc_norm_stderr,none": 0.003263087603897235,
						"acc_stderr,none": 0.0030478270686255175,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.69,
						"acc_stderr,none": 0.020704041021724795,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7160772540226711,
						"acc_norm,none": 0.5958619336483942,
						"acc_norm_stderr,none": 0.004598168838341248,
						"acc_stderr,none": 0.15025462680941115,
						"alias": "pythia",
						"bits_per_byte,none": 0.6194402405334403,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.536278995998468,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.004151656494574,
						"perplexity_stderr,none": 0.0853963706849589,
						"word_perplexity,none": 9.934353680376269,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.4024822695035461,
						"acc_norm,none": 0.4592198581560284,
						"acc_norm_stderr,none": 0.06096183797928268,
						"acc_stderr,none": 0.044708898258928376,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.575,
						"acc_norm_stderr,none": 0.04531634835874828,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.3625,
						"acc_norm,none": 0.48125,
						"acc_norm_stderr,none": 0.03962468875738329,
						"acc_stderr,none": 0.0381237434064489,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.397887323943662,
						"acc_norm,none": 0.397887323943662,
						"acc_norm_stderr,none": 0.029095492917064893,
						"acc_stderr,none": 0.029095492917064907,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5304777594728172,
						"acc_stderr,none": 0.006752830158916003,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.529656195894138,
						"acc_stderr,none": 0.0024823227215194157,
						"alias": "qqp",
						"f1,none": 0.28133030990173846,
						"f1_stderr,none": 0.003620581003001898
					},
					"race": {
						"acc,none": 0.38181818181818183,
						"acc_stderr,none": 0.015036133294674136,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.516245487364621,
						"acc_stderr,none": 0.030080573208738064,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.925,
						"acc_norm,none": 0.895,
						"acc_norm_stderr,none": 0.009698921026024954,
						"acc_stderr,none": 0.008333333333333366,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.516245487364621,
						"acc_stderr,none": 0.030080573208738064,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.6318807339449541,
						"acc_stderr,none": 0.01634190769779869,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.549335199440168,
						"acc_norm,none": 0.7463760871738478,
						"acc_norm_stderr,none": 0.0030761299614220014,
						"acc_stderr,none": 0.0035178413981851744,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5150577351835214,
						"acc_stderr,none": 0.012920415764871355,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5055088141025641,
						"acc_stderr,none": 0.0050039516886131365,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.49559136515658253,
						"acc_stderr,none": 0.005033644799289794,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.543235294117647,
						"acc_stderr,none": 0.004932436043212622,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.2957315334460171,
						"acc_stderr,none": 0.03485008104638614,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.2876376988984088,
						"bleu_acc_stderr,none": 0.0002511057022926932,
						"bleu_diff,none": -10.251930996798425,
						"bleu_diff_stderr,none": 0.6423983995999273,
						"bleu_max,none": 25.04500918259965,
						"bleu_max_stderr,none": 0.5666597483767061,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.00024182670675721596,
						"rouge1_diff,none": -12.87367992958503,
						"rouge1_diff_stderr,none": 0.7610821487355909,
						"rouge1_max,none": 49.34764427245758,
						"rouge1_max_stderr,none": 0.7689288016132617,
						"rouge2_acc,none": 0.211750305997552,
						"rouge2_acc_stderr,none": 0.00020454915920036063,
						"rouge2_diff,none": -14.790526943078385,
						"rouge2_diff_stderr,none": 1.0637993438666953,
						"rouge2_max,none": 33.001995528488436,
						"rouge2_max_stderr,none": 0.9809341676009351,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.00023403117548621337,
						"rougeL_diff,none": -12.982393096206822,
						"rougeL_diff_stderr,none": 0.7679664439786057,
						"rougeL_max,none": 46.578606282191004,
						"rougeL_max_stderr,none": 0.7764362289690075
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.2876376988984088,
						"bleu_acc_stderr,none": 0.01584631510139481,
						"bleu_diff,none": -10.251930996798425,
						"bleu_diff_stderr,none": 0.8014975980000983,
						"bleu_max,none": 25.04500918259965,
						"bleu_max_stderr,none": 0.7527680574896268,
						"rouge1_acc,none": 0.27050183598531213,
						"rouge1_acc_stderr,none": 0.015550778332842892,
						"rouge1_diff,none": -12.87367992958503,
						"rouge1_diff_stderr,none": 0.8724002227966192,
						"rouge1_max,none": 49.34764427245758,
						"rouge1_max_stderr,none": 0.8768858543808662,
						"rouge2_acc,none": 0.211750305997552,
						"rouge2_acc_stderr,none": 0.014302068353925617,
						"rouge2_diff,none": -14.790526943078385,
						"rouge2_diff_stderr,none": 1.0314064881833425,
						"rouge2_max,none": 33.001995528488436,
						"rouge2_max_stderr,none": 0.9904212071643737,
						"rougeL_acc,none": 0.25703794369645044,
						"rougeL_acc_stderr,none": 0.015298077509485085,
						"rougeL_diff,none": -12.982393096206822,
						"rougeL_diff_stderr,none": 0.8763369466013662,
						"rougeL_max,none": 46.578606282191004,
						"rougeL_max_stderr,none": 0.8811561887480605
					},
					"truthfulqa_mc1": {
						"acc,none": 0.22766217870257038,
						"acc_stderr,none": 0.014679255032111068,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.32976621081774043,
						"acc_stderr,none": 0.013002499588973683,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.07283464566929133,
						"exact_match_stderr,none": 0.0057662390329137805
					},
					"wic": {
						"acc,none": 0.493730407523511,
						"acc_stderr,none": 0.01980916380119652,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6194402405334403,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.536278995998468,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 9.934353680376269,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.6503551696921863,
						"acc_stderr,none": 0.013402073680850514,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.5915492957746479,
						"acc_stderr,none": 0.05875113694257524,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.6346153846153846,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.8131868131868132,
						"acc_stderr,none": 0.023632761722644554,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5254545454545455,
						"acc_stderr,none": 0.036407165846333675,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.492,
						"acc_stderr,none": 0.022380208834928035,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.54,
						"acc_stderr,none": 0.02231133324528966,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.604,
						"acc_stderr,none": 0.021893529941665813,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.478,
						"acc_stderr,none": 0.02236139673920788,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.522,
						"acc_stderr,none": 0.02236139673920788,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.546,
						"acc_stderr,none": 0.02228814759117695,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044292,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.514,
						"acc_stderr,none": 0.02237429816635319,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.556,
						"acc_stderr,none": 0.02224224437573102,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3827309236947791,
						"acc_stderr,none": 0.05194928176239464,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.344578313253012,
						"acc_stderr,none": 0.00952559090011065,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.37028112449799194,
						"acc_stderr,none": 0.00967891540984029,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.4461847389558233,
						"acc_stderr,none": 0.00996385427413916,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3481927710843373,
						"acc_stderr,none": 0.009548980649153386,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5357429718875502,
						"acc_stderr,none": 0.009996432468510355,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.41967871485943775,
						"acc_stderr,none": 0.009891912665432372,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.46947791164658637,
						"acc_stderr,none": 0.010003382355314755,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3405622489959839,
						"acc_stderr,none": 0.009498886690274447,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4433734939759036,
						"acc_stderr,none": 0.009957592660538648,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3369477911646586,
						"acc_stderr,none": 0.009474203778757722,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3405622489959839,
						"acc_stderr,none": 0.009498886690274442,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3437751004016064,
						"acc_stderr,none": 0.009520310502882934,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3321285140562249,
						"acc_stderr,none": 0.009440328001240636,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3309236947791165,
						"acc_stderr,none": 0.009431685461463288,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516876,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5524336682510078,
						"acc_stderr,none": 0.06791399332607427,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4798146922567836,
						"acc_stderr,none": 0.01285663570649829,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7485109199205824,
						"acc_stderr,none": 0.011165293988715807,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6393117140966248,
						"acc_stderr,none": 0.012357592682139025,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.514228987425546,
						"acc_stderr,none": 0.012861913999596127,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.01286238758665008,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.513567174056916,
						"acc_stderr,none": 0.01286238758665008,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.48974189278623426,
						"acc_stderr,none": 0.012864417047980477,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5823957643944407,
						"acc_stderr,none": 0.012691211382848643,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.514228987425546,
						"acc_stderr,none": 0.012861913999596127,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5327597617471873,
						"acc_stderr,none": 0.012839477563855927,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.0128060889661224,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.74331310406833,
						"acc_stderr,none": 0.08408219267029617,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8718279569892473,
						"acc_stderr,none": 0.006934162057729827,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6385542168674698,
						"acc_stderr,none": 0.053053439348320096,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5620437956204379,
						"acc_stderr,none": 0.016029414748731596,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6653992395437263,
						"acc_stderr,none": 0.02915103415331038,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6222222222222222,
						"acc_stderr,none": 0.0273606328610564,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.628968253968254,
						"acc_stderr,none": 0.02153951426767635,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "togethercomputer/RedPajama-INCITE-7B-Base"
	},
	"togethercomputer/RedPajama-INCITE-7B-Chat": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.5529875986471252,
						"acc_norm,none": 0.5535512965050733,
						"acc_norm_stderr,none": 0.038918178643137395,
						"acc_stderr,none": 0.04542179924454409,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.016827375056145368,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.0163,
						"acc_stderr,none": 0.01793739296998913,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.6984328358208955,
						"acc_stderr,none": 0.12960776697461782,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.23699851411589895,
						"acc_norm,none": 0.23699851411589895,
						"acc_norm_stderr,none": 0.120297153104536,
						"acc_stderr,none": 0.120297153104536,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.24736660335002592,
						"acc_norm,none": 0.24736660335002592,
						"acc_norm_stderr,none": 0.04073900074538987,
						"acc_stderr,none": 0.04073900074538987,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 9.684294871794872,
						"likelihood_diff_stderr,none": 0.8716334090272987,
						"pct_stereotype,none": 0.5494931425163982,
						"pct_stereotype_stderr,none": 0.06468297465811357
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"glue": {
						"acc,none": 0.40714857213674177,
						"acc_stderr,none": 0.05584131660910533,
						"alias": "glue",
						"f1,none": 0.4559448662253682,
						"f1_stderr,none": 0.0011006139595858319,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.0001699530356106545
					},
					"kmmlu": {
						"acc,none": 0.2017614784868611,
						"acc_norm,none": 0.2017614784868611,
						"acc_norm_stderr,none": 0.027959380698846664,
						"acc_stderr,none": 0.027959380698846664,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.48454286340714753,
						"acc_norm,none": 0.436,
						"acc_norm_stderr,none": 0.0004927935871743444,
						"acc_stderr,none": 0.037575934484114504,
						"alias": "kobest",
						"f1,none": 0.4594032241773939,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5022317096836794,
						"acc_stderr,none": 0.02888484280774511,
						"alias": "lambada",
						"perplexity,none": 26.844288482467036,
						"perplexity_stderr,none": 7.112567317232621
					},
					"lambada_cloze": {
						"acc,none": 0.0474480884921405,
						"acc_stderr,none": 0.006144316666335166,
						"alias": "lambada_cloze",
						"perplexity,none": 34604.264230774505,
						"perplexity_stderr,none": 13529.118944967058
					},
					"lambada_multilingual": {
						"acc,none": 0.3100329904909761,
						"acc_stderr,none": 0.07030764008109443,
						"alias": "lambada_multilingual",
						"perplexity,none": 1539.6668436962125,
						"perplexity_stderr,none": 637.4153305857243
					},
					"mmlu": {
						"acc,none": 0.26442102264634665,
						"acc_stderr,none": 0.0400659570556117,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.26971307120085014,
						"acc_stderr,none": 0.0328318856965235,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.2774380431284197,
						"acc_stderr,none": 0.046228362938913345,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2596685082872928,
						"acc_stderr,none": 0.03490150431857856,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.2483349191246432,
						"acc_stderr,none": 0.0444512172922875,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.30333569907735985,
						"acc_norm,none": 0.2738906488825624,
						"acc_norm_stderr,none": 0.00013865662123225387,
						"acc_stderr,none": 0.09004895699791461,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.4774285714285714,
						"acc_stderr,none": 0.0164166009344586,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.6183426651297151,
						"acc_norm,none": 0.5580943692382687,
						"acc_norm_stderr,none": 0.0033940005642664273,
						"acc_stderr,none": 0.12331965059934231,
						"alias": "pythia",
						"bits_per_byte,none": 1.1244889239038887,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.1802429753546315,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 13.204578722248053,
						"perplexity_stderr,none": 0.7151533605421062,
						"word_perplexity,none": 64.5859752567749,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3670212765957447,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.07506396300310132,
						"acc_stderr,none": 0.046742342657368985,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5252737013743303,
						"acc_stderr,none": 0.01400175046915762,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.31755435383640956,
						"acc_stderr,none": 0.04394825005271716,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3929008567931457,
						"bleu_acc_stderr,none": 0.000292315898926905,
						"bleu_diff,none": -0.9754972753765898,
						"bleu_diff_stderr,none": 0.10284966221834896,
						"bleu_max,none": 7.583145061587616,
						"bleu_max_stderr,none": 0.19277427926872076,
						"rouge1_acc,none": 0.4039167686658507,
						"rouge1_acc_stderr,none": 0.0002950588390396911,
						"rouge1_diff,none": -1.5578273857466083,
						"rouge1_diff_stderr,none": 0.16935689376322163,
						"rouge1_max,none": 22.564247471466203,
						"rouge1_max_stderr,none": 0.5056025805803193,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.00025174094812872063,
						"rouge2_diff,none": -1.7409962519274904,
						"rouge2_diff_stderr,none": 0.2019261307050604,
						"rouge2_max,none": 12.319470457960664,
						"rouge2_max_stderr,none": 0.36582013577984945,
						"rougeL_acc,none": 0.39167686658506734,
						"rougeL_acc_stderr,none": 0.0002919927680970231,
						"rougeL_diff,none": -1.7896102946981185,
						"rougeL_diff_stderr,none": 0.16131898251823637,
						"rougeL_max,none": 20.38646640698796,
						"rougeL_max_stderr,none": 0.4557002134319161
					},
					"xcopa": {
						"acc,none": 0.5354545454545454,
						"acc_stderr,none": 0.038723016982852965,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.35504685408299863,
						"acc_stderr,none": 0.027797989181005584,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5471391613019674,
						"acc_stderr,none": 0.06424062411866863,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.640143852551135,
						"acc_stderr,none": 0.04713991502529151,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.5529875986471252,
						"acc_norm,none": 0.5535512965050733,
						"acc_norm_stderr,none": 0.038918178643137395,
						"acc_stderr,none": 0.04542179924454409,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.3478125,
						"acc_stderr,none": 0.016827375056145368,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.352,
						"acc_stderr,none": 0.015110404505648684,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456732,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.3591666666666667,
						"acc_stderr,none": 0.013855141559780366,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.363481228668942,
						"acc_norm,none": 0.39334470989761094,
						"acc_norm_stderr,none": 0.014275101465693024,
						"acc_stderr,none": 0.014056207319068283,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.6464646464646465,
						"acc_norm,none": 0.6325757575757576,
						"acc_norm_stderr,none": 0.009892552616211553,
						"acc_stderr,none": 0.009809728948151493,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.0163,
						"acc_stderr,none": 0.01793739296998913,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.0285,
						"acc_stderr,none": 0.003721666347242934,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.036,
						"acc_stderr,none": 0.004166614973833125,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.02,
						"acc_stderr,none": 0.003131278085898044,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.067,
						"acc_stderr,none": 0.00559206004686872,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.003,
						"acc_stderr,none": 0.001223212215464709,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0065,
						"acc_stderr,none": 0.0017973564602277766,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.002,
						"acc_stderr,none": 0.000999249343069499,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.0008676789587852494,
						"acc_stderr,none": 0.00061340851413439,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.6984328358208955,
						"acc_stderr,none": 0.12960776697461782,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274547,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.925,
						"acc_stderr,none": 0.00833333333333335,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.888,
						"acc_stderr,none": 0.009977753031397222,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.685,
						"acc_stderr,none": 0.014696631960792505,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.777,
						"acc_stderr,none": 0.013169830843425684,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.01524937846417175,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.01505026612756444,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.523,
						"acc_stderr,none": 0.0158025542467261,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.721,
						"acc_stderr,none": 0.014190150117612032,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.008282064512704159,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.832,
						"acc_stderr,none": 0.01182860583145425,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.01263164908309918,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.8,
						"acc_stderr,none": 0.012655439943366644,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.01379003862087284,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.79,
						"acc_stderr,none": 0.012886662332274534,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.71,
						"acc_stderr,none": 0.014356395999905694,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.879,
						"acc_stderr,none": 0.010318210380946094,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.015070604603768408,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.559,
						"acc_stderr,none": 0.015708779894242676,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.654,
						"acc_stderr,none": 0.015050266127564441,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.745,
						"acc_stderr,none": 0.013790038620872833,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.61,
						"acc_stderr,none": 0.01543172505386661,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.929,
						"acc_stderr,none": 0.00812557844248791,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.335,
						"acc_stderr,none": 0.014933117490932575,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.763,
						"acc_stderr,none": 0.01345407046257795,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.624,
						"acc_stderr,none": 0.015325105508898125,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.734,
						"acc_stderr,none": 0.013979965645145148,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.666,
						"acc_stderr,none": 0.014922019523732961,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.642,
						"acc_stderr,none": 0.015167928865407559,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.703,
						"acc_stderr,none": 0.014456832294801093,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.677,
						"acc_stderr,none": 0.014794927843348635,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.398,
						"acc_stderr,none": 0.015486634102858908,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.652,
						"acc_stderr,none": 0.01507060460376841,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.646,
						"acc_stderr,none": 0.015129868238451773,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.578,
						"acc_stderr,none": 0.01562562511262066,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.596,
						"acc_stderr,none": 0.015524980677122583,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.015466551464829347,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.605,
						"acc_stderr,none": 0.015466551464829342,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.72,
						"acc_stderr,none": 0.014205696104091505,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.735,
						"acc_stderr,none": 0.013963164754809946,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.664,
						"acc_stderr,none": 0.014944140233795021,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 0.952,
						"acc_stderr,none": 0.006763264133666662,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.013414729030247102,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.956,
						"acc_stderr,none": 0.006488921798427419,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.738,
						"acc_stderr,none": 0.01391220865102135,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.633,
						"acc_stderr,none": 0.015249378464171749,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.503,
						"acc_stderr,none": 0.015819015179246724,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.683,
						"acc_stderr,none": 0.01472167543888022,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.714,
						"acc_stderr,none": 0.014297146862517908,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.801,
						"acc_stderr,none": 0.01263164908309919,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.542,
						"acc_stderr,none": 0.015763390640483703,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.583,
						"acc_stderr,none": 0.015599819048769618,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.771,
						"acc_stderr,none": 0.013294199326613618,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.76,
						"acc_stderr,none": 0.013512312258920835,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.655,
						"acc_stderr,none": 0.015039986742055237,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.704,
						"acc_stderr,none": 0.014442734941575018,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.723,
						"acc_stderr,none": 0.014158794845306265,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499325,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.779,
						"acc_stderr,none": 0.013127502859696235,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.826,
						"acc_stderr,none": 0.011994493230973428,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.711,
						"acc_stderr,none": 0.014341711358296183,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.86,
						"acc_stderr,none": 0.010978183844357793,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.815,
						"acc_stderr,none": 0.012285191326386696,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.302,
						"acc_stderr,none": 0.014526080235459548,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.014876872027456732,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.7042813455657493,
						"acc_stderr,none": 0.00798187630182266,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.07142857142857142,
						"acc_stderr,none": 0.034726602486028435,
						"alias": "cb",
						"f1,none": 0.05977907732293697,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.23699851411589895,
						"acc_norm,none": 0.23699851411589895,
						"acc_norm_stderr,none": 0.120297153104536,
						"acc_stderr,none": 0.120297153104536,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.20408163265306123,
						"acc_norm,none": 0.20408163265306123,
						"acc_norm_stderr,none": 0.058172215566282534,
						"acc_stderr,none": 0.058172215566282534,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.07226812131946557,
						"acc_stderr,none": 0.07226812131946557,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434489,
						"acc_stderr,none": 0.07233518641434489,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.30303030303030304,
						"acc_norm,none": 0.30303030303030304,
						"acc_norm_stderr,none": 0.08124094920275461,
						"acc_stderr,none": 0.08124094920275461,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482894,
						"acc_stderr,none": 0.09810018692482894,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.1702127659574468,
						"acc_norm,none": 0.1702127659574468,
						"acc_norm_stderr,none": 0.055411578656325386,
						"acc_stderr,none": 0.055411578656325386,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.1049727762162956,
						"acc_stderr,none": 0.1049727762162956,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.06180629713445797,
						"acc_stderr,none": 0.06180629713445797,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.16216216216216217,
						"acc_norm,none": 0.16216216216216217,
						"acc_norm_stderr,none": 0.06143325088732367,
						"acc_stderr,none": 0.06143325088732367,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.10540925533894598,
						"acc_stderr,none": 0.10540925533894598,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.5263157894736842,
						"acc_norm,none": 0.5263157894736842,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.08539125638299665,
						"acc_stderr,none": 0.08539125638299665,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.07138609234576078,
						"acc_stderr,none": 0.07138609234576078,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.2702702702702703,
						"acc_norm,none": 0.2702702702702703,
						"acc_norm_stderr,none": 0.07401656182502248,
						"acc_stderr,none": 0.07401656182502248,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.3225806451612903,
						"acc_norm,none": 0.3225806451612903,
						"acc_norm_stderr,none": 0.08534681648595453,
						"acc_stderr,none": 0.08534681648595453,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.12903225806451613,
						"acc_norm,none": 0.12903225806451613,
						"acc_norm_stderr,none": 0.06120537406777506,
						"acc_stderr,none": 0.06120537406777506,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.0723351864143449,
						"acc_stderr,none": 0.0723351864143449,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.35,
						"acc_norm,none": 0.35,
						"acc_norm_stderr,none": 0.10942433098048308,
						"acc_stderr,none": 0.10942433098048308,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.05263157894736842,
						"acc_norm,none": 0.05263157894736842,
						"acc_norm_stderr,none": 0.05263157894736841,
						"acc_stderr,none": 0.05263157894736841,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.125,
						"acc_norm,none": 0.125,
						"acc_norm_stderr,none": 0.06895966054592131,
						"acc_stderr,none": 0.06895966054592131,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764437,
						"acc_stderr,none": 0.09361833424764437,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.3181818181818182,
						"acc_norm,none": 0.3181818181818182,
						"acc_norm_stderr,none": 0.10163945352271771,
						"acc_stderr,none": 0.10163945352271771,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252415,
						"acc_stderr,none": 0.09477598811252415,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.20833333333333334,
						"acc_norm,none": 0.20833333333333334,
						"acc_norm_stderr,none": 0.08468112965594378,
						"acc_stderr,none": 0.08468112965594378,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522107,
						"acc_stderr,none": 0.10101525445522107,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.10513149660756933,
						"acc_stderr,none": 0.10513149660756933,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.08333333333333333,
						"acc_norm,none": 0.08333333333333333,
						"acc_norm_stderr,none": 0.08333333333333331,
						"acc_stderr,none": 0.08333333333333331,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.0971859061499725,
						"acc_stderr,none": 0.0971859061499725,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.38095238095238093,
						"acc_norm,none": 0.38095238095238093,
						"acc_norm_stderr,none": 0.10858813572372741,
						"acc_stderr,none": 0.10858813572372741,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.17391304347826086,
						"acc_norm,none": 0.17391304347826086,
						"acc_norm_stderr,none": 0.08081046758996392,
						"acc_stderr,none": 0.08081046758996392,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.09090909090909091,
						"acc_norm,none": 0.09090909090909091,
						"acc_norm_stderr,none": 0.06273323266748675,
						"acc_stderr,none": 0.06273323266748675,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.1724137931034483,
						"acc_norm,none": 0.1724137931034483,
						"acc_norm_stderr,none": 0.0713860923457608,
						"acc_stderr,none": 0.0713860923457608,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.30612244897959184,
						"acc_norm,none": 0.30612244897959184,
						"acc_norm_stderr,none": 0.06652247352247599,
						"acc_stderr,none": 0.06652247352247599,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.11363636363636363,
						"acc_norm,none": 0.11363636363636363,
						"acc_norm_stderr,none": 0.048398332783092544,
						"acc_stderr,none": 0.048398332783092544,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.10869565217391304,
						"acc_norm,none": 0.10869565217391304,
						"acc_norm_stderr,none": 0.0463994509958902,
						"acc_stderr,none": 0.0463994509958902,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.24736660335002592,
						"acc_norm,none": 0.24736660335002592,
						"acc_norm_stderr,none": 0.04073900074538987,
						"acc_stderr,none": 0.04073900074538987,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.24260355029585798,
						"acc_norm,none": 0.24260355029585798,
						"acc_norm_stderr,none": 0.03307162750323177,
						"acc_stderr,none": 0.03307162750323177,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.23648648648648649,
						"acc_norm,none": 0.23648648648648649,
						"acc_norm_stderr,none": 0.035047162412504336,
						"acc_stderr,none": 0.035047162412504336,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.23780487804878048,
						"acc_norm,none": 0.23780487804878048,
						"acc_norm_stderr,none": 0.03334645408665337,
						"acc_stderr,none": 0.03334645408665337,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2375,
						"acc_norm,none": 0.2375,
						"acc_norm_stderr,none": 0.033748398517792225,
						"acc_stderr,none": 0.033748398517792225,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.03453131801885415,
						"acc_stderr,none": 0.03453131801885415,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.215311004784689,
						"acc_norm,none": 0.215311004784689,
						"acc_norm_stderr,none": 0.028500352224822192,
						"acc_stderr,none": 0.028500352224822192,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.2625,
						"acc_norm,none": 0.2625,
						"acc_norm_stderr,none": 0.034893706520187605,
						"acc_stderr,none": 0.034893706520187605,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.2595419847328244,
						"acc_norm,none": 0.2595419847328244,
						"acc_norm_stderr,none": 0.03844876139785271,
						"acc_stderr,none": 0.03844876139785271,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.25735294117647056,
						"acc_norm,none": 0.25735294117647056,
						"acc_norm_stderr,none": 0.037626074966240076,
						"acc_stderr,none": 0.037626074966240076,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2336448598130841,
						"acc_norm,none": 0.2336448598130841,
						"acc_norm_stderr,none": 0.04109984842463997,
						"acc_stderr,none": 0.04109984842463997,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.23219814241486067,
						"acc_norm,none": 0.23219814241486067,
						"acc_norm_stderr,none": 0.023530221420663067,
						"acc_stderr,none": 0.023530221420663067,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.2549019607843137,
						"acc_norm,none": 0.2549019607843137,
						"acc_norm_stderr,none": 0.03058759135160425,
						"acc_stderr,none": 0.03058759135160425,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.22346368715083798,
						"acc_norm,none": 0.22346368715083798,
						"acc_norm_stderr,none": 0.03122298091957976,
						"acc_stderr,none": 0.03122298091957976,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.22362869198312235,
						"acc_norm,none": 0.22362869198312235,
						"acc_norm_stderr,none": 0.027123298205229972,
						"acc_stderr,none": 0.027123298205229972,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.043960933774393765,
						"acc_stderr,none": 0.043960933774393765,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2830188679245283,
						"acc_norm,none": 0.2830188679245283,
						"acc_norm_stderr,none": 0.04396093377439377,
						"acc_stderr,none": 0.04396093377439377,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.21296296296296297,
						"acc_norm,none": 0.21296296296296297,
						"acc_norm_stderr,none": 0.0395783547198098,
						"acc_stderr,none": 0.0395783547198098,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.22857142857142856,
						"acc_norm,none": 0.22857142857142856,
						"acc_norm_stderr,none": 0.04117581097845101,
						"acc_stderr,none": 0.04117581097845101,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.2169811320754717,
						"acc_norm,none": 0.2169811320754717,
						"acc_norm_stderr,none": 0.04022559246936713,
						"acc_stderr,none": 0.04022559246936713,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2600732600732601,
						"acc_norm,none": 0.2600732600732601,
						"acc_norm_stderr,none": 0.026598537627601462,
						"acc_stderr,none": 0.026598537627601462,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.25980392156862747,
						"acc_norm,none": 0.25980392156862747,
						"acc_norm_stderr,none": 0.030778554678693257,
						"acc_stderr,none": 0.030778554678693257,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.22807017543859648,
						"acc_norm,none": 0.22807017543859648,
						"acc_norm_stderr,none": 0.03218093795602357,
						"acc_stderr,none": 0.03218093795602357,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.24489795918367346,
						"acc_norm,none": 0.24489795918367346,
						"acc_norm_stderr,none": 0.03558926157606757,
						"acc_stderr,none": 0.03558926157606757,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.22302158273381295,
						"acc_norm,none": 0.22302158273381295,
						"acc_norm_stderr,none": 0.03543548499561939,
						"acc_stderr,none": 0.03543548499561939,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.23270440251572327,
						"acc_norm,none": 0.23270440251572327,
						"acc_norm_stderr,none": 0.03361670240809546,
						"acc_stderr,none": 0.03361670240809546,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.2331288343558282,
						"acc_norm,none": 0.2331288343558282,
						"acc_norm_stderr,none": 0.03322015795776741,
						"acc_stderr,none": 0.03322015795776741,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.23837209302325582,
						"acc_norm,none": 0.23837209302325582,
						"acc_norm_stderr,none": 0.03258375068525893,
						"acc_stderr,none": 0.03258375068525893,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.23809523809523808,
						"acc_norm,none": 0.23809523809523808,
						"acc_norm_stderr,none": 0.02688368747322084,
						"acc_stderr,none": 0.02688368747322084,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.03191178226713545,
						"acc_stderr,none": 0.03191178226713545,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.226890756302521,
						"acc_norm,none": 0.226890756302521,
						"acc_norm_stderr,none": 0.02720537153827947,
						"acc_stderr,none": 0.02720537153827947,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.027256850838819964,
						"acc_stderr,none": 0.027256850838819964,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2962962962962963,
						"acc_norm,none": 0.2962962962962963,
						"acc_norm_stderr,none": 0.03944624162501116,
						"acc_stderr,none": 0.03944624162501116,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.03535681229053242,
						"acc_stderr,none": 0.03535681229053242,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.3068181818181818,
						"acc_norm,none": 0.3068181818181818,
						"acc_norm_stderr,none": 0.03486142240553238,
						"acc_stderr,none": 0.03486142240553238,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.2550335570469799,
						"acc_norm,none": 0.2550335570469799,
						"acc_norm_stderr,none": 0.03582912165111174,
						"acc_stderr,none": 0.03582912165111174,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.23668639053254437,
						"acc_norm,none": 0.23668639053254437,
						"acc_norm_stderr,none": 0.032793177922689494,
						"acc_stderr,none": 0.032793177922689494,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.21212121212121213,
						"acc_norm,none": 0.21212121212121213,
						"acc_norm_stderr,none": 0.0357179155646827,
						"acc_stderr,none": 0.0357179155646827,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.2966101694915254,
						"acc_norm,none": 0.2966101694915254,
						"acc_norm_stderr,none": 0.04222776832233628,
						"acc_stderr,none": 0.04222776832233628,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810787,
						"acc_stderr,none": 0.03304756158810787,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.24545454545454545,
						"acc_norm,none": 0.24545454545454545,
						"acc_norm_stderr,none": 0.04122066502878284,
						"acc_stderr,none": 0.04122066502878284,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.23076923076923078,
						"acc_norm,none": 0.23076923076923078,
						"acc_norm_stderr,none": 0.035356812290532405,
						"acc_stderr,none": 0.035356812290532405,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.037184890068181146,
						"acc_stderr,none": 0.037184890068181146,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.23783783783783785,
						"acc_norm,none": 0.23783783783783785,
						"acc_norm_stderr,none": 0.03138739368330483,
						"acc_stderr,none": 0.03138739368330483,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.25547445255474455,
						"acc_norm,none": 0.25547445255474455,
						"acc_norm_stderr,none": 0.021538805402399563,
						"acc_stderr,none": 0.021538805402399563,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.2850467289719626,
						"acc_norm,none": 0.2850467289719626,
						"acc_norm_stderr,none": 0.030931932789218734,
						"acc_stderr,none": 0.030931932789218734,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.24390243902439024,
						"acc_norm,none": 0.24390243902439024,
						"acc_norm_stderr,none": 0.03887917804888516,
						"acc_stderr,none": 0.03887917804888516,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.3360655737704918,
						"acc_norm,none": 0.3360655737704918,
						"acc_norm_stderr,none": 0.04294196582481048,
						"acc_stderr,none": 0.04294196582481048,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2523809523809524,
						"acc_norm,none": 0.2523809523809524,
						"acc_norm_stderr,none": 0.030046599156031494,
						"acc_stderr,none": 0.030046599156031494,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.2388888888888889,
						"acc_norm,none": 0.2388888888888889,
						"acc_norm_stderr,none": 0.03187098535605761,
						"acc_stderr,none": 0.03187098535605761,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2698412698412698,
						"acc_norm,none": 0.2698412698412698,
						"acc_norm_stderr,none": 0.03237307120120853,
						"acc_stderr,none": 0.03237307120120853,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.20689655172413793,
						"acc_norm,none": 0.20689655172413793,
						"acc_norm_stderr,none": 0.03777396948311489,
						"acc_stderr,none": 0.03777396948311489,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.21379310344827587,
						"acc_norm,none": 0.21379310344827587,
						"acc_norm_stderr,none": 0.034165204477475494,
						"acc_stderr,none": 0.034165204477475494,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.26666666666666666,
						"acc_norm,none": 0.26666666666666666,
						"acc_norm_stderr,none": 0.04336290903919941,
						"acc_stderr,none": 0.04336290903919941,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.032377088536015224,
						"acc_stderr,none": 0.032377088536015224,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.26540284360189575,
						"acc_norm,none": 0.26540284360189575,
						"acc_norm_stderr,none": 0.030469670650846655,
						"acc_stderr,none": 0.030469670650846655,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.19148936170212766,
						"acc_norm,none": 0.19148936170212766,
						"acc_norm_stderr,none": 0.020318870444788667,
						"acc_stderr,none": 0.020318870444788667,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.28448275862068967,
						"acc_norm,none": 0.28448275862068967,
						"acc_norm_stderr,none": 0.029684657126093528,
						"acc_stderr,none": 0.029684657126093528,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.2471264367816092,
						"acc_norm,none": 0.2471264367816092,
						"acc_norm_stderr,none": 0.032794240385439676,
						"acc_stderr,none": 0.032794240385439676,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.25925925925925924,
						"acc_norm,none": 0.25925925925925924,
						"acc_norm_stderr,none": 0.03785714465066653,
						"acc_stderr,none": 0.03785714465066653,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.26548672566371684,
						"acc_norm,none": 0.26548672566371684,
						"acc_norm_stderr,none": 0.02943946890825876,
						"acc_stderr,none": 0.02943946890825876,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2545454545454545,
						"acc_norm,none": 0.2545454545454545,
						"acc_norm_stderr,none": 0.03401506715249039,
						"acc_stderr,none": 0.03401506715249039,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617757,
						"acc_stderr,none": 0.03231470996617757,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516736,
						"acc_stderr,none": 0.03385633936516736,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.03471460744058984,
						"acc_stderr,none": 0.03471460744058984,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.26875,
						"acc_norm,none": 0.26875,
						"acc_norm_stderr,none": 0.035156741348767645,
						"acc_stderr,none": 0.035156741348767645,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.01303660368388387
					},
					"copa": {
						"acc,none": 0.77,
						"acc_stderr,none": 0.04229525846816505,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 9.684294871794872,
						"likelihood_diff_stderr,none": 0.8716334090272987,
						"pct_stereotype,none": 0.5494931425163982,
						"pct_stereotype_stderr,none": 0.06468297465811357
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 10.044871794871796,
						"likelihood_diff_stderr,none": 0.20969369842216884,
						"pct_stereotype,none": 0.5909361955873583,
						"pct_stereotype_stderr,none": 0.012009607538515816
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 10.634615384615385,
						"likelihood_diff_stderr,none": 1.0633211306118544,
						"pct_stereotype,none": 0.6923076923076923,
						"pct_stereotype_stderr,none": 0.04865042554105198
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 5.681818181818182,
						"likelihood_diff_stderr,none": 1.5692683982624271,
						"pct_stereotype,none": 0.45454545454545453,
						"pct_stereotype_stderr,none": 0.15745916432444335
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 12.653846153846153,
						"likelihood_diff_stderr,none": 1.1944469409825142,
						"pct_stereotype,none": 0.7230769230769231,
						"pct_stereotype_stderr,none": 0.055934767585573
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 8.87109375,
						"likelihood_diff_stderr,none": 0.447599110178382,
						"pct_stereotype,none": 0.603125,
						"pct_stereotype_stderr,none": 0.02739272232337023
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 8.39236111111111,
						"likelihood_diff_stderr,none": 0.48622408939982625,
						"pct_stereotype,none": 0.5416666666666666,
						"pct_stereotype_stderr,none": 0.03398110890294636
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 12.56076388888889,
						"likelihood_diff_stderr,none": 1.1987538997495408,
						"pct_stereotype,none": 0.6805555555555556,
						"pct_stereotype_stderr,none": 0.05533504751887218
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 10.17101377952756,
						"likelihood_diff_stderr,none": 0.377061695519183,
						"pct_stereotype,none": 0.5196850393700787,
						"pct_stereotype_stderr,none": 0.022188563396746394
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 9.371621621621621,
						"likelihood_diff_stderr,none": 0.8401636432439823,
						"pct_stereotype,none": 0.5945945945945946,
						"pct_stereotype_stderr,none": 0.04681218398834801
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 11.862903225806452,
						"likelihood_diff_stderr,none": 0.9631589998617119,
						"pct_stereotype,none": 0.6881720430107527,
						"pct_stereotype_stderr,none": 0.04829610685421209
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 11.30657894736842,
						"likelihood_diff_stderr,none": 0.6113729812505367,
						"pct_stereotype,none": 0.6947368421052632,
						"pct_stereotype_stderr,none": 0.03349781342677419
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 9.322152653548002,
						"likelihood_diff_stderr,none": 0.20118289435588768,
						"pct_stereotype,none": 0.5044722719141324,
						"pct_stereotype_stderr,none": 0.012212810647205391
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 9.38888888888889,
						"likelihood_diff_stderr,none": 0.8743029246811306,
						"pct_stereotype,none": 0.43333333333333335,
						"pct_stereotype_stderr,none": 0.052526671187288064
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 8.173076923076923,
						"likelihood_diff_stderr,none": 2.2626752332240185,
						"pct_stereotype,none": 0.38461538461538464,
						"pct_stereotype_stderr,none": 0.1404416814115811
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 11.378787878787879,
						"likelihood_diff_stderr,none": 1.2801519889940718,
						"pct_stereotype,none": 0.6212121212121212,
						"pct_stereotype_stderr,none": 0.0601674102524024
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 8.97702492211838,
						"likelihood_diff_stderr,none": 0.45334602847309374,
						"pct_stereotype,none": 0.4984423676012461,
						"pct_stereotype_stderr,none": 0.027950714088670354
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 9.132905138339922,
						"likelihood_diff_stderr,none": 0.4717960865681399,
						"pct_stereotype,none": 0.4031620553359684,
						"pct_stereotype_stderr,none": 0.030900660885291857
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 9.975694444444445,
						"likelihood_diff_stderr,none": 0.9494885489322957,
						"pct_stereotype,none": 0.5833333333333334,
						"pct_stereotype_stderr,none": 0.05850912479161746
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 8.7875,
						"likelihood_diff_stderr,none": 0.34899221358985677,
						"pct_stereotype,none": 0.45869565217391306,
						"pct_stereotype_stderr,none": 0.023258233524708842
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 9.039130434782608,
						"likelihood_diff_stderr,none": 0.7865768856434114,
						"pct_stereotype,none": 0.6173913043478261,
						"pct_stereotype_stderr,none": 0.04552031372871532
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 10.17032967032967,
						"likelihood_diff_stderr,none": 0.9913998249727658,
						"pct_stereotype,none": 0.6593406593406593,
						"pct_stereotype_stderr,none": 0.049956709512768704
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 10.186224489795919,
						"likelihood_diff_stderr,none": 0.6750768471115579,
						"pct_stereotype,none": 0.6020408163265306,
						"pct_stereotype_stderr,none": 0.035052171504729904
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"glue": {
						"acc,none": 0.40714857213674177,
						"acc_stderr,none": 0.05584131660910533,
						"alias": "glue",
						"f1,none": 0.4559448662253682,
						"f1_stderr,none": 0.0011006139595858319,
						"mcc,none": -0.02929206145132745,
						"mcc_stderr,none": 0.0001699530356106545
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.00530705079605762,
						"exact_match_stderr,get-answer": 0.002001305720948044
					},
					"hellaswag": {
						"acc,none": 0.5368452499502091,
						"acc_norm,none": 0.6909978092013543,
						"acc_norm_stderr,none": 0.004611377019520788,
						"acc_stderr,none": 0.004976214989483506,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.2017614784868611,
						"acc_norm,none": 0.2017614784868611,
						"acc_norm_stderr,none": 0.027959380698846664,
						"acc_stderr,none": 0.027959380698846664,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319332,
						"acc_stderr,none": 0.012535235623319332,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.194,
						"acc_norm,none": 0.194,
						"acc_norm_stderr,none": 0.012510816141264378,
						"acc_stderr,none": 0.012510816141264378,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.249,
						"acc_norm,none": 0.249,
						"acc_norm_stderr,none": 0.013681600278702319,
						"acc_stderr,none": 0.013681600278702319,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696244,
						"acc_stderr,none": 0.013127502859696244,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.22666666666666666,
						"acc_norm,none": 0.22666666666666666,
						"acc_norm_stderr,none": 0.017106603245700853,
						"acc_stderr,none": 0.017106603245700853,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.147,
						"acc_norm,none": 0.147,
						"acc_norm_stderr,none": 0.011203415395160333,
						"acc_stderr,none": 0.011203415395160333,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.197,
						"acc_norm,none": 0.197,
						"acc_norm_stderr,none": 0.012583693787968123,
						"acc_stderr,none": 0.012583693787968123,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.177,
						"acc_norm,none": 0.177,
						"acc_norm_stderr,none": 0.012075463420375061,
						"acc_stderr,none": 0.012075463420375061,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.028085923439997273,
						"acc_stderr,none": 0.028085923439997273,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412817,
						"acc_stderr,none": 0.012310790208412817,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.25384615384615383,
						"acc_norm,none": 0.25384615384615383,
						"acc_norm_stderr,none": 0.03831815850874501,
						"acc_stderr,none": 0.03831815850874501,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.04292346959909284,
						"acc_stderr,none": 0.04292346959909284,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.01220758063766215,
						"acc_stderr,none": 0.01220758063766215,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.171,
						"acc_norm,none": 0.171,
						"acc_norm_stderr,none": 0.011912216456264595,
						"acc_stderr,none": 0.011912216456264595,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.232,
						"acc_norm,none": 0.232,
						"acc_norm_stderr,none": 0.013354937452281581,
						"acc_stderr,none": 0.013354937452281581,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.144,
						"acc_norm,none": 0.144,
						"acc_norm_stderr,none": 0.011107987548939149,
						"acc_stderr,none": 0.011107987548939149,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.221,
						"acc_norm,none": 0.221,
						"acc_norm_stderr,none": 0.013127502859696237,
						"acc_stderr,none": 0.013127502859696237,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.192,
						"acc_norm,none": 0.192,
						"acc_norm_stderr,none": 0.012461592646659973,
						"acc_stderr,none": 0.012461592646659973,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.182,
						"acc_norm,none": 0.182,
						"acc_norm_stderr,none": 0.012207580637662153,
						"acc_stderr,none": 0.012207580637662153,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.196,
						"acc_norm,none": 0.196,
						"acc_norm_stderr,none": 0.012559527926707371,
						"acc_stderr,none": 0.012559527926707371,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.29,
						"acc_norm,none": 0.29,
						"acc_norm_stderr,none": 0.045604802157206845,
						"acc_stderr,none": 0.045604802157206845,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.161,
						"acc_norm,none": 0.161,
						"acc_norm_stderr,none": 0.01162816469672718,
						"acc_stderr,none": 0.01162816469672718,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.212,
						"acc_norm,none": 0.212,
						"acc_norm_stderr,none": 0.012931481864938022,
						"acc_stderr,none": 0.012931481864938022,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.229,
						"acc_norm,none": 0.229,
						"acc_norm_stderr,none": 0.013294199326613606,
						"acc_stderr,none": 0.013294199326613606,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.013512312258920835,
						"acc_stderr,none": 0.013512312258920835,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.199,
						"acc_norm,none": 0.199,
						"acc_norm_stderr,none": 0.012631649083099177,
						"acc_stderr,none": 0.012631649083099177,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.223,
						"acc_norm,none": 0.223,
						"acc_norm_stderr,none": 0.013169830843425658,
						"acc_stderr,none": 0.013169830843425658,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.24666666666666667,
						"acc_norm,none": 0.24666666666666667,
						"acc_norm_stderr,none": 0.01761308429172702,
						"acc_stderr,none": 0.01761308429172702,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.218,
						"acc_norm,none": 0.218,
						"acc_norm_stderr,none": 0.013063179040595297,
						"acc_stderr,none": 0.013063179040595297,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.206,
						"acc_norm,none": 0.206,
						"acc_norm_stderr,none": 0.012795613612786555,
						"acc_stderr,none": 0.012795613612786555,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.202,
						"acc_norm,none": 0.202,
						"acc_norm_stderr,none": 0.012702651587655133,
						"acc_stderr,none": 0.012702651587655133,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.189,
						"acc_norm,none": 0.189,
						"acc_norm_stderr,none": 0.012386784588117707,
						"acc_stderr,none": 0.012386784588117707,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.0446196043338474,
						"acc_stderr,none": 0.0446196043338474,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.02395648228514077,
						"acc_stderr,none": 0.02395648228514077,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.01331455133593595,
						"acc_stderr,none": 0.01331455133593595,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.187,
						"acc_norm,none": 0.187,
						"acc_norm_stderr,none": 0.012336254828074144,
						"acc_stderr,none": 0.012336254828074144,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319332,
						"acc_stderr,none": 0.012535235623319332,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.24,
						"acc_norm,none": 0.24,
						"acc_norm_stderr,none": 0.030275120389073037,
						"acc_stderr,none": 0.030275120389073037,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.012411851354816322,
						"acc_stderr,none": 0.012411851354816322,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.209,
						"acc_norm,none": 0.209,
						"acc_norm_stderr,none": 0.012864077288499325,
						"acc_stderr,none": 0.012864077288499325,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663322,
						"acc_stderr,none": 0.029365141882663322,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.195,
						"acc_norm,none": 0.195,
						"acc_norm_stderr,none": 0.012535235623319329,
						"acc_stderr,none": 0.012535235623319329,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.48454286340714753,
						"acc_norm,none": 0.436,
						"acc_norm_stderr,none": 0.0004927935871743444,
						"acc_stderr,none": 0.037575934484114504,
						"alias": "kobest",
						"f1,none": 0.4594032241773939,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5185185185185185,
						"acc_stderr,none": 0.013339608823275211,
						"alias": " - kobest_boolq",
						"f1,none": 0.5144830860145526,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.496,
						"acc_stderr,none": 0.01581879370351089,
						"alias": " - kobest_copa",
						"f1,none": 0.49519230769230765,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.356,
						"acc_norm,none": 0.436,
						"acc_norm_stderr,none": 0.0221989546414768,
						"acc_stderr,none": 0.021434712356072645,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.3535767914247874,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4987405541561713,
						"acc_stderr,none": 0.025125865671612197,
						"alias": " - kobest_sentineg",
						"f1,none": 0.49130420784907114,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.48412698412698413,
						"acc_stderr,none": 0.014084394649774396,
						"alias": " - kobest_wic",
						"f1,none": 0.4015677609427609,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.5022317096836794,
						"acc_stderr,none": 0.02888484280774511,
						"alias": "lambada",
						"perplexity,none": 26.844288482467036,
						"perplexity_stderr,none": 7.112567317232621
					},
					"lambada_cloze": {
						"acc,none": 0.0474480884921405,
						"acc_stderr,none": 0.006144316666335166,
						"alias": "lambada_cloze",
						"perplexity,none": 34604.264230774505,
						"perplexity_stderr,none": 13529.118944967058
					},
					"lambada_multilingual": {
						"acc,none": 0.3100329904909761,
						"acc_stderr,none": 0.07030764008109443,
						"alias": "lambada_multilingual",
						"perplexity,none": 1539.6668436962125,
						"perplexity_stderr,none": 637.4153305857243
					},
					"lambada_openai": {
						"acc,none": 0.5583155443431012,
						"acc_stderr,none": 0.0069184369937385914,
						"alias": " - lambada_openai",
						"perplexity,none": 13.204578722248053,
						"perplexity_stderr,none": 0.7151533605421062
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.036677663496992044,
						"acc_stderr,none": 0.0026187782113318366,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 60684.42665973713,
						"perplexity_stderr,none": 5058.996863971963
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.2447118183582379,
						"acc_stderr,none": 0.005989573373070088,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 2746.8437315343094,
						"perplexity_stderr,none": 305.47200641450394
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.5573452357849796,
						"acc_stderr,none": 0.006920011095249961,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 13.144323504470321,
						"perplexity_stderr,none": 0.7112097365952054
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.21618474674946633,
						"acc_stderr,none": 0.00573497398727918,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 2552.5754228816513,
						"perplexity_stderr,none": 271.95186762774784
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.2815835435668543,
						"acc_stderr,none": 0.006266194106395884,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 864.5218865840123,
						"perplexity_stderr,none": 86.51281743433115
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.2503396079953425,
						"acc_stderr,none": 0.006035442817612808,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 1521.2488539766175,
						"perplexity_stderr,none": 156.93736488600635
					},
					"lambada_standard": {
						"acc,none": 0.44614787502425773,
						"acc_stderr,none": 0.006925456414702117,
						"alias": " - lambada_standard",
						"perplexity,none": 40.53420763702591,
						"perplexity_stderr,none": 2.6373844393864556
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.05821851348728896,
						"acc_stderr,none": 0.0032622534822329035,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 8524.101801811876,
						"perplexity_stderr,none": 602.2873428363857
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.23473282442748092,
						"exact_match_stderr,get-answer": 0.010693142700087904
					},
					"logiqa": {
						"acc,none": 0.24270353302611367,
						"acc_norm,none": 0.2749615975422427,
						"acc_norm_stderr,none": 0.017512971782225207,
						"acc_stderr,none": 0.01681567620647953,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.2340966921119593,
						"acc_norm,none": 0.25127226463104324,
						"acc_norm_stderr,none": 0.01094324556925147,
						"acc_stderr,none": 0.010683080933862762,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.240536013400335,
						"acc_norm,none": 0.23685092127303184,
						"acc_norm_stderr,none": 0.007782924578956575,
						"acc_stderr,none": 0.007824277362109031,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.40711713619995765,
						"acc_stderr,none": 0.005056326872107211,
						"alias": "mc_taco",
						"f1,none": 0.5058262711864406,
						"f1_stderr,none": 0.005739597176029872
					},
					"medmcqa": {
						"acc,none": 0.28544107100167343,
						"acc_norm,none": 0.28544107100167343,
						"acc_norm_stderr,none": 0.0069836946461276144,
						"acc_stderr,none": 0.0069836946461276144,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.24823252160251374,
						"acc_norm,none": 0.24823252160251374,
						"acc_norm_stderr,none": 0.012112325990383007,
						"acc_stderr,none": 0.012112325990383007,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.26442102264634665,
						"acc_stderr,none": 0.0400659570556117,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.042923469599092816,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.037857144650666544,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.27631578947368424,
						"acc_stderr,none": 0.03639057569952925,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3433962264150943,
						"acc_stderr,none": 0.02922452646912479,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768078,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.03942772444036623,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.27,
						"acc_stderr,none": 0.0446196043338474,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.31213872832369943,
						"acc_stderr,none": 0.03533133389323657,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2549019607843137,
						"acc_stderr,none": 0.043364327079931785,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.36,
						"acc_stderr,none": 0.048241815132442176,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.2127659574468085,
						"acc_stderr,none": 0.026754391348039773,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.17543859649122806,
						"acc_stderr,none": 0.0357795481394837,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.21379310344827587,
						"acc_stderr,none": 0.034165204477475494,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.24074074074074073,
						"acc_stderr,none": 0.0220190800122179,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.19047619047619047,
						"acc_stderr,none": 0.035122074123020534,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.26,
						"acc_stderr,none": 0.04408440022768079,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.24193548387096775,
						"acc_stderr,none": 0.024362599693031086,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.24630541871921183,
						"acc_stderr,none": 0.030315099285617736,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.3,
						"acc_stderr,none": 0.046056618647183814,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2909090909090909,
						"acc_stderr,none": 0.03546563019624336,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.03173071239071724,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.21761658031088082,
						"acc_stderr,none": 0.029778663037752954,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.24871794871794872,
						"acc_stderr,none": 0.0219169577092138,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25555555555555554,
						"acc_stderr,none": 0.026593939101844072,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.028657491285071973,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.23841059602649006,
						"acc_stderr,none": 0.0347918557259966,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.28807339449541286,
						"acc_stderr,none": 0.01941644589263602,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.20833333333333334,
						"acc_stderr,none": 0.027696910713093936,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.2696078431372549,
						"acc_stderr,none": 0.031145570659486782,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.31645569620253167,
						"acc_stderr,none": 0.030274974880218974,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.2914798206278027,
						"acc_stderr,none": 0.030500283176545916,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.2900763358778626,
						"acc_stderr,none": 0.03980066246467765,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.26971307120085014,
						"acc_stderr,none": 0.0328318856965235,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.2892561983471074,
						"acc_stderr,none": 0.04139112727635464,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.2777777777777778,
						"acc_stderr,none": 0.043300437496507416,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.2883435582822086,
						"acc_stderr,none": 0.035590395316173425,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.30357142857142855,
						"acc_stderr,none": 0.04364226155841044,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.17475728155339806,
						"acc_stderr,none": 0.037601780060266196,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.31196581196581197,
						"acc_stderr,none": 0.03035152732334496,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.24,
						"acc_stderr,none": 0.04292346959909283,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.3167305236270754,
						"acc_stderr,none": 0.016635566427712474,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.26878612716763006,
						"acc_stderr,none": 0.023868003262500114,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2558659217877095,
						"acc_stderr,none": 0.014593620923210763,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.2581699346405229,
						"acc_stderr,none": 0.025058503316958154,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.2774380431284197,
						"acc_stderr,none": 0.046228362938913345,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.2765273311897106,
						"acc_stderr,none": 0.025403832978179604,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.27469135802469136,
						"acc_stderr,none": 0.024836057868294677,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.24468085106382978,
						"acc_stderr,none": 0.025645553622266726,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.25945241199478486,
						"acc_stderr,none": 0.011195262076350286,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.1948529411764706,
						"acc_stderr,none": 0.02406059942348742,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.01751781884501444,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.3181818181818182,
						"acc_stderr,none": 0.04461272175910508,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.23673469387755103,
						"acc_stderr,none": 0.027212835884073142,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.2596685082872928,
						"acc_stderr,none": 0.03490150431857856,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.263681592039801,
						"acc_stderr,none": 0.031157150869355558,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.2483349191246432,
						"acc_stderr,none": 0.0444512172922875,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.21084337349397592,
						"acc_stderr,none": 0.031755547866299194,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.3508771929824561,
						"acc_stderr,none": 0.03660298834049163,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.33316352521650533,
						"acc_stderr,none": 0.00475790066914407,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.3263832384052075,
						"acc_stderr,none": 0.004729024000627118,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.6764705882352942,
						"acc_stderr,none": 0.023189113109403536,
						"alias": "mrpc",
						"f1,none": 0.7981651376146789,
						"f1_stderr,none": 0.01719050669014494
					},
					"multimedqa": {
						"acc,none": 0.30333569907735985,
						"acc_norm,none": 0.2738906488825624,
						"acc_norm_stderr,none": 0.00013865662123225387,
						"acc_stderr,none": 0.09004895699791461,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5699257425742574,
						"acc_stderr,none": 0.007111223871933897,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.6996802123217227,
						"mrr_stderr,none": 0.010498066767157196,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4051918735891648,
						"r@2_stderr,none": 0.01650240246733025
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.6424943584082627,
						"mrr_stderr,none": 0.010467019901892836,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.45711060948081267,
						"r@2_stderr,none": 0.016745367862103507
					},
					"openbookqa": {
						"acc,none": 0.302,
						"acc_norm,none": 0.414,
						"acc_norm_stderr,none": 0.022049497969827865,
						"acc_stderr,none": 0.020553269174209195,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4605,
						"acc_stderr,none": 0.011148184426533295,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.4655,
						"acc_stderr,none": 0.011156482803925172,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.4445,
						"acc_stderr,none": 0.011114028784284503,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5015,
						"acc_stderr,none": 0.011183085696839195,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.494,
						"acc_stderr,none": 0.01118233080628221,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.486,
						"acc_stderr,none": 0.011178751372184874,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.49,
						"acc_stderr,none": 0.011180899170152985,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.4774285714285714,
						"acc_stderr,none": 0.0164166009344586,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7225244831338411,
						"acc_norm,none": 0.7241566920565833,
						"acc_norm_stderr,none": 0.010427805502729115,
						"acc_stderr,none": 0.010446818281039954,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.32557643040136636,
						"acc_norm,none": 0.33534372331340734,
						"acc_norm_stderr,none": 0.003449187451821404,
						"acc_stderr,none": 0.0034234658473118597,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.644,
						"acc_stderr,none": 0.02143471235607266,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.6183426651297151,
						"acc_norm,none": 0.5580943692382687,
						"acc_norm_stderr,none": 0.0033940005642664273,
						"acc_stderr,none": 0.12331965059934231,
						"alias": "pythia",
						"bits_per_byte,none": 1.1244889239038887,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.1802429753546315,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 13.204578722248053,
						"perplexity_stderr,none": 0.7151533605421062,
						"word_perplexity,none": 64.5859752567749,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.3670212765957447,
						"acc_norm,none": 0.4308510638297872,
						"acc_norm_stderr,none": 0.07506396300310132,
						"acc_stderr,none": 0.046742342657368985,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.44166666666666665,
						"acc_norm,none": 0.5916666666666667,
						"acc_norm_stderr,none": 0.045058059858031296,
						"acc_stderr,none": 0.04552192400253557,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.35625,
						"acc_norm,none": 0.40625,
						"acc_norm_stderr,none": 0.03894932504400619,
						"acc_stderr,none": 0.03797847267587851,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.3415492957746479,
						"acc_norm,none": 0.3767605633802817,
						"acc_norm_stderr,none": 0.028804939288711216,
						"acc_stderr,none": 0.028190002383528697,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5017389712612118,
						"acc_stderr,none": 0.0067653696341649335,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.4229779866435815,
						"acc_stderr,none": 0.0024570193348718372,
						"alias": "qqp",
						"f1,none": 0.4527691116792944,
						"f1_stderr,none": 0.003008675471492587
					},
					"race": {
						"acc,none": 0.3751196172248804,
						"acc_stderr,none": 0.014984183551431952,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.5415162454873647,
						"acc_stderr,none": 0.029992535385373314,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.882,
						"acc_norm,none": 0.828,
						"acc_norm_stderr,none": 0.011939788882495321,
						"acc_stderr,none": 0.0102068692643818,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.5415162454873647,
						"acc_stderr,none": 0.029992535385373314,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.6548165137614679,
						"acc_stderr,none": 0.01610926550804417,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5485354393681895,
						"acc_norm,none": 0.7043886833949815,
						"acc_norm_stderr,none": 0.00322624567256072,
						"acc_stderr,none": 0.003518397472724458,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5252737013743303,
						"acc_stderr,none": 0.01400175046915762,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5028044871794872,
						"acc_stderr,none": 0.005004176707396474,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.5552852944157292,
						"acc_stderr,none": 0.005002974345626141,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.518235294117647,
						"acc_stderr,none": 0.00494768666137911,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.31755435383640956,
						"acc_stderr,none": 0.04394825005271716,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3929008567931457,
						"bleu_acc_stderr,none": 0.000292315898926905,
						"bleu_diff,none": -0.9754972753765898,
						"bleu_diff_stderr,none": 0.10284966221834896,
						"bleu_max,none": 7.583145061587616,
						"bleu_max_stderr,none": 0.19277427926872076,
						"rouge1_acc,none": 0.4039167686658507,
						"rouge1_acc_stderr,none": 0.0002950588390396911,
						"rouge1_diff,none": -1.5578273857466083,
						"rouge1_diff_stderr,none": 0.16935689376322163,
						"rouge1_max,none": 22.564247471466203,
						"rouge1_max_stderr,none": 0.5056025805803193,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.00025174094812872063,
						"rouge2_diff,none": -1.7409962519274904,
						"rouge2_diff_stderr,none": 0.2019261307050604,
						"rouge2_max,none": 12.319470457960664,
						"rouge2_max_stderr,none": 0.36582013577984945,
						"rougeL_acc,none": 0.39167686658506734,
						"rougeL_acc_stderr,none": 0.0002919927680970231,
						"rougeL_diff,none": -1.7896102946981185,
						"rougeL_diff_stderr,none": 0.16131898251823637,
						"rougeL_max,none": 20.38646640698796,
						"rougeL_max_stderr,none": 0.4557002134319161
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3929008567931457,
						"bleu_acc_stderr,none": 0.017097248285233065,
						"bleu_diff,none": -0.9754972753765898,
						"bleu_diff_stderr,none": 0.3207018275881024,
						"bleu_max,none": 7.583145061587616,
						"bleu_max_stderr,none": 0.43906067834494217,
						"rouge1_acc,none": 0.4039167686658507,
						"rouge1_acc_stderr,none": 0.01717727682258428,
						"rouge1_diff,none": -1.5578273857466083,
						"rouge1_diff_stderr,none": 0.41152994272983545,
						"rouge1_max,none": 22.564247471466203,
						"rouge1_max_stderr,none": 0.7110573679952408,
						"rouge2_acc,none": 0.28886168910648713,
						"rouge2_acc_stderr,none": 0.015866346401384304,
						"rouge2_diff,none": -1.7409962519274904,
						"rouge2_diff_stderr,none": 0.44936191505851986,
						"rouge2_max,none": 12.319470457960664,
						"rouge2_max_stderr,none": 0.604830667029913,
						"rougeL_acc,none": 0.39167686658506734,
						"rougeL_acc_stderr,none": 0.01708779588176963,
						"rougeL_diff,none": -1.7896102946981185,
						"rougeL_diff_stderr,none": 0.4016453442008713,
						"rougeL_max,none": 20.38646640698796,
						"rougeL_max_stderr,none": 0.6750557113541934
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23011015911872704,
						"acc_stderr,none": 0.014734557959807763,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.36127645119525076,
						"acc_stderr,none": 0.01545598236776507,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.04675196850393701,
						"exact_match_stderr,none": 0.004684335017570898
					},
					"wic": {
						"acc,none": 0.5,
						"acc_stderr,none": 0.01981072129375818,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 1.1244889239038887,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 2.1802429753546315,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 64.5859752567749,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.5919494869771112,
						"acc_stderr,none": 0.013812822643745021,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.4507042253521127,
						"acc_stderr,none": 0.05947027187737998,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.36538461538461536,
						"acc_stderr,none": 0.0474473339327792,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7032967032967034,
						"acc_stderr,none": 0.027697847815938703,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5354545454545454,
						"acc_stderr,none": 0.038723016982852965,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.506,
						"acc_stderr,none": 0.022381462412439324,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.516,
						"acc_stderr,none": 0.0223716109825804,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.022231970696321122,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.628,
						"acc_stderr,none": 0.021637197985722396,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.526,
						"acc_stderr,none": 0.02235279165091416,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.532,
						"acc_stderr,none": 0.022337186479044296,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.524,
						"acc_stderr,none": 0.022357273881016403,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.55,
						"acc_stderr,none": 0.022270877485360437,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.474,
						"acc_stderr,none": 0.022352791650914163,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.564,
						"acc_stderr,none": 0.0221989546414768,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.35504685408299863,
						"acc_stderr,none": 0.027797989181005584,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3317269076305221,
						"acc_stderr,none": 0.009437454900329123,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201527,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.37991967871485943,
						"acc_stderr,none": 0.009728758452987872,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.3228915662650602,
						"acc_stderr,none": 0.009372274805730624,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.43132530120481927,
						"acc_stderr,none": 0.009927090290379255,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.3718875502008032,
						"acc_stderr,none": 0.009687507958631809,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.40923694779116465,
						"acc_stderr,none": 0.00985556741448024,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3389558232931727,
						"acc_stderr,none": 0.009487992732201519,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.3823293172690763,
						"acc_stderr,none": 0.009740580649033704,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3385542168674699,
						"acc_stderr,none": 0.009485250208516873,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3317269076305221,
						"acc_stderr,none": 0.00943745490032912,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3325301204819277,
						"acc_stderr,none": 0.009443193365903347,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.334136546184739,
						"acc_stderr,none": 0.009454577602463628,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3373493975903614,
						"acc_stderr,none": 0.009476976849778593,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.009522954469806036,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5471391613019674,
						"acc_stderr,none": 0.06424062411866863,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4811383189940437,
						"acc_stderr,none": 0.012857966762465003,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.7319655857048313,
						"acc_stderr,none": 0.011398616363361086,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.6293845135671741,
						"acc_stderr,none": 0.012428861084065901,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5102581072137657,
						"acc_stderr,none": 0.012864417047980472,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5029781601588352,
						"acc_stderr,none": 0.012866897066011221,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.500992720052945,
						"acc_stderr,none": 0.012867099955422925,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.49040370615486434,
						"acc_stderr,none": 0.01286475526040896,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.5830575777630708,
						"acc_stderr,none": 0.012688354121607806,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5188616810059563,
						"acc_stderr,none": 0.012857966762464998,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5261416280608868,
						"acc_stderr,none": 0.012849526888044213,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5433487756452681,
						"acc_stderr,none": 0.012818676452481956,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.640143852551135,
						"acc_stderr,none": 0.04713991502529151,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.7243010752688172,
						"acc_stderr,none": 0.009269558278880766,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6265060240963856,
						"acc_stderr,none": 0.05341921480681956,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5109489051094891,
						"acc_stderr,none": 0.01615039318009044,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6349809885931559,
						"acc_stderr,none": 0.029743184010936927,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.5587301587301587,
						"acc_stderr,none": 0.02802130493237513,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.5535714285714286,
						"acc_stderr,none": 0.022165566315820333,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "togethercomputer/RedPajama-INCITE-7B-Chat"
	},
	"togethercomputer/RedPajama-INCITE-7B-Instruct": {
		"config": {
			"dtype=bfloat16,trust_remote_code=True": {
				"confObj": {
					"dtype": "bfloat16",
					"trust_remote_code": "True"
				},
				"confStr": "dtype=bfloat16,trust_remote_code=True",
				"groups": {
					"ai2_arc": {
						"acc,none": 0.600620067643743,
						"acc_norm,none": 0.6164036076662909,
						"acc_norm_stderr,none": 0.10272219392035134,
						"acc_stderr,none": 0.10212982681108428,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.325,
						"acc_stderr,none": 0.014605967635886613,
						"alias": "anli"
					},
					"arithmetic": {
						"acc,none": 0.15695,
						"acc_stderr,none": 0.18340546292926357,
						"alias": "arithmetic"
					},
					"blimp": {
						"acc,none": 0.815955223880597,
						"acc_stderr,none": 0.16557624951270272,
						"alias": " - blimp"
					},
					"ceval-valid": {
						"acc,none": 0.26745913818722133,
						"acc_norm,none": 0.26745913818722133,
						"acc_norm_stderr,none": 0.12340262880518994,
						"acc_stderr,none": 0.12340262880518994,
						"alias": "ceval-valid"
					},
					"cmmlu": {
						"acc,none": 0.2572094629597652,
						"acc_norm,none": 0.2572094629597652,
						"acc_norm_stderr,none": 0.04141957805238283,
						"acc_stderr,none": 0.04141957805238283,
						"alias": "cmmlu"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.590777802623733,
						"likelihood_diff_stderr,none": 0.4303348750203673,
						"pct_stereotype,none": 0.6197078115682767,
						"pct_stereotype_stderr,none": 0.09384099725563327
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.2263779527559055,
						"exact_match_stderr,none": 0.009285953859206367
					},
					"glue": {
						"acc,none": 0.5924877424778993,
						"acc_stderr,none": 0.09863420636607939,
						"alias": "glue",
						"f1,none": 0.6846939032354279,
						"f1_stderr,none": 0.00016860913785430294,
						"mcc,none": 0.08057923504646884,
						"mcc_stderr,none": 0.0009909165844571503
					},
					"kmmlu": {
						"acc,none": 0.13517181634421022,
						"acc_norm,none": 0.13517181634421022,
						"acc_norm_stderr,none": 0.053389581332072555,
						"acc_stderr,none": 0.053389581332072555,
						"alias": "kmmlu"
					},
					"kobest": {
						"acc,none": 0.4792808594606446,
						"acc_norm,none": 0.446,
						"acc_norm_stderr,none": 0.0004951583166332648,
						"acc_stderr,none": 0.039158340172767106,
						"alias": "kobest",
						"f1,none": 0.3822009085469602,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6535028138948186,
						"acc_stderr,none": 0.015592601744389837,
						"alias": "lambada",
						"perplexity,none": 4.704267936135776,
						"perplexity_stderr,none": 0.26882463857509087
					},
					"lambada_cloze": {
						"acc,none": 0.08247622744032602,
						"acc_stderr,none": 0.029639166875354016,
						"alias": "lambada_cloze",
						"perplexity,none": 228.08772301889098,
						"perplexity_stderr,none": 46.99911355168256
					},
					"lambada_multilingual": {
						"acc,none": 0.40919852513099164,
						"acc_stderr,none": 0.07881626118050682,
						"alias": "lambada_multilingual",
						"perplexity,none": 68.0827334066118,
						"perplexity_stderr,none": 20.359653449385455
					},
					"mmlu": {
						"acc,none": 0.32317333713146273,
						"acc_stderr,none": 0.05897347516172344,
						"alias": "mmlu"
					},
					"mmlu_humanities": {
						"acc,none": 0.3177470775770457,
						"acc_stderr,none": 0.05524014998901674,
						"alias": " - humanities"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749597,
						"acc_stderr,none": 0.06498932394179731,
						"alias": " - other"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.32791680207994806,
						"acc_stderr,none": 0.04868272895247769,
						"alias": " - social_sciences"
					},
					"mmlu_stem": {
						"acc,none": 0.28893117665715184,
						"acc_stderr,none": 0.05445770965513818,
						"alias": " - stem"
					},
					"multimedqa": {
						"acc,none": 0.2936834634492548,
						"acc_norm,none": 0.2683186135984066,
						"acc_norm_stderr,none": 8.600489523348352e-05,
						"acc_stderr,none": 0.07174932081788213,
						"alias": "stem"
					},
					"pawsx": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.04369243553504766,
						"alias": "pawsx"
					},
					"pythia": {
						"acc,none": 0.7211627332042874,
						"acc_norm,none": 0.6209434214661425,
						"acc_norm_stderr,none": 0.01260100101361836,
						"acc_stderr,none": 0.1559323372737713,
						"alias": "pythia",
						"bits_per_byte,none": 0.6254666222299659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.542709715488212,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.206237682827478,
						"perplexity_stderr,none": 0.0927437074622225,
						"word_perplexity,none": 10.158756334994626,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41134751773049644,
						"acc_norm,none": 0.45567375886524825,
						"acc_norm_stderr,none": 0.05320843427753255,
						"acc_stderr,none": 0.039262787617228866,
						"alias": "qa4mre"
					},
					"sycophancy": {
						"acc,none": 0.5383514691690793,
						"acc_stderr,none": 0.025050820977546028,
						"alias": "sycophancy"
					},
					"truthfulqa": {
						"acc,none": 0.3055022521837347,
						"acc_stderr,none": 0.03656239278774286,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3011015911872705,
						"bleu_acc_stderr,none": 0.00025789144972030077,
						"bleu_diff,none": -9.001934630605374,
						"bleu_diff_stderr,none": 0.609706773931244,
						"bleu_max,none": 25.803721470815315,
						"bleu_max_stderr,none": 0.5939472986331531,
						"rouge1_acc,none": 0.2766217870257038,
						"rouge1_acc_stderr,none": 0.00024522325241226745,
						"rouge1_diff,none": -11.768159276195956,
						"rouge1_diff_stderr,none": 0.6596934811127005,
						"rouge1_max,none": 51.491984755493874,
						"rouge1_max_stderr,none": 0.7115419760726746,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.00022732253882482836,
						"rouge2_diff,none": -13.766156782955843,
						"rouge2_diff_stderr,none": 0.9754279826781366,
						"rouge2_max,none": 35.032337261676204,
						"rouge2_max_stderr,none": 0.9710174456585365,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.00024044238490645661,
						"rougeL_diff,none": -12.018707216661447,
						"rougeL_diff_stderr,none": 0.6824768950198414,
						"rougeL_max,none": 48.40620987330686,
						"rougeL_max_stderr,none": 0.7391060437300081
					},
					"xcopa": {
						"acc,none": 0.5285454545454547,
						"acc_stderr,none": 0.036469924902244086,
						"alias": "xcopa"
					},
					"xnli": {
						"acc,none": 0.3856760374832664,
						"acc_stderr,none": 0.05028935202602867,
						"alias": "xnli"
					},
					"xstorycloze": {
						"acc,none": 0.5594127910474701,
						"acc_stderr,none": 0.0925333616404555,
						"alias": "xstorycloze"
					},
					"xwinograd": {
						"acc,none": 0.7412901775679928,
						"acc_stderr,none": 0.06363409522532021,
						"alias": "xwinograd"
					}
				},
				"results": {
					"ai2_arc": {
						"acc,none": 0.600620067643743,
						"acc_norm,none": 0.6164036076662909,
						"acc_norm_stderr,none": 0.10272219392035134,
						"acc_stderr,none": 0.10212982681108428,
						"alias": " - ai2_arc"
					},
					"anli": {
						"acc,none": 0.325,
						"acc_stderr,none": 0.014605967635886613,
						"alias": "anli"
					},
					"anli_r1": {
						"acc,none": 0.332,
						"acc_stderr,none": 0.01489959724281149,
						"alias": " - anli_r1"
					},
					"anli_r2": {
						"acc,none": 0.324,
						"acc_stderr,none": 0.014806864733738864,
						"alias": " - anli_r2"
					},
					"anli_r3": {
						"acc,none": 0.32,
						"acc_stderr,none": 0.013471620929769145,
						"alias": " - anli_r3"
					},
					"arc_challenge": {
						"acc,none": 0.3848122866894198,
						"acc_norm,none": 0.3993174061433447,
						"acc_norm_stderr,none": 0.014312094557946704,
						"acc_stderr,none": 0.014218371065251104,
						"alias": "  - arc_challenge"
					},
					"arc_easy": {
						"acc,none": 0.7070707070707071,
						"acc_norm,none": 0.7234848484848485,
						"acc_norm_stderr,none": 0.00917788010146828,
						"acc_stderr,none": 0.009338583737393606,
						"alias": "  - arc_easy"
					},
					"arithmetic": {
						"acc,none": 0.15695,
						"acc_stderr,none": 0.18340546292926357,
						"alias": "arithmetic"
					},
					"arithmetic_1dc": {
						"acc,none": 0.066,
						"acc_stderr,none": 0.005553144938623085,
						"alias": "arithmetic_1dc"
					},
					"arithmetic_2da": {
						"acc,none": 0.6695,
						"acc_stderr,none": 0.010520941978266642,
						"alias": "arithmetic_2da"
					},
					"arithmetic_2dm": {
						"acc,none": 0.165,
						"acc_stderr,none": 0.008301925137008155,
						"alias": "arithmetic_2dm"
					},
					"arithmetic_2ds": {
						"acc,none": 0.448,
						"acc_stderr,none": 0.01112249319745628,
						"alias": "arithmetic_2ds"
					},
					"arithmetic_3da": {
						"acc,none": 0.112,
						"acc_stderr,none": 0.007053571892184717,
						"alias": "arithmetic_3da"
					},
					"arithmetic_3ds": {
						"acc,none": 0.0965,
						"acc_stderr,none": 0.006604217049841648,
						"alias": "arithmetic_3ds"
					},
					"arithmetic_4da": {
						"acc,none": 0.006,
						"acc_stderr,none": 0.001727278711115508,
						"alias": "arithmetic_4da"
					},
					"arithmetic_4ds": {
						"acc,none": 0.006,
						"acc_stderr,none": 0.00172727871111552,
						"alias": "arithmetic_4ds"
					},
					"arithmetic_5da": {
						"acc,none": 0.0005,
						"acc_stderr,none": 0.000500000000000003,
						"alias": "arithmetic_5da"
					},
					"arithmetic_5ds": {
						"acc,none": 0.0,
						"acc_stderr,none": 0.0,
						"alias": "arithmetic_5ds"
					},
					"asdiv": {
						"acc,none": 0.00824295010845987,
						"acc_stderr,none": 0.0018836610014054645,
						"alias": "asdiv"
					},
					"blimp": {
						"acc,none": 0.815955223880597,
						"acc_stderr,none": 0.16557624951270272,
						"alias": " - blimp"
					},
					"blimp_adjunct_island": {
						"acc,none": 0.768,
						"acc_stderr,none": 0.013354937452281557,
						"alias": "  - blimp_adjunct_island"
					},
					"blimp_anaphor_gender_agreement": {
						"acc,none": 0.972,
						"acc_stderr,none": 0.0052195060344100395,
						"alias": "  - blimp_anaphor_gender_agreement"
					},
					"blimp_anaphor_number_agreement": {
						"acc,none": 0.991,
						"acc_stderr,none": 0.002987963843142669,
						"alias": "  - blimp_anaphor_number_agreement"
					},
					"blimp_animate_subject_passive": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598285,
						"alias": "  - blimp_animate_subject_passive"
					},
					"blimp_animate_subject_trans": {
						"acc,none": 0.848,
						"acc_stderr,none": 0.01135891830347529,
						"alias": "  - blimp_animate_subject_trans"
					},
					"blimp_causative": {
						"acc,none": 0.739,
						"acc_stderr,none": 0.013895037677965138,
						"alias": "  - blimp_causative"
					},
					"blimp_complex_NP_island": {
						"acc,none": 0.579,
						"acc_stderr,none": 0.015620595475301317,
						"alias": "  - blimp_complex_NP_island"
					},
					"blimp_coordinate_structure_constraint_complex_left_branch": {
						"acc,none": 0.676,
						"acc_stderr,none": 0.014806864733738863,
						"alias": "  - blimp_coordinate_structure_constraint_complex_left_branch"
					},
					"blimp_coordinate_structure_constraint_object_extraction": {
						"acc,none": 0.828,
						"acc_stderr,none": 0.011939788882495321,
						"alias": "  - blimp_coordinate_structure_constraint_object_extraction"
					},
					"blimp_determiner_noun_agreement_1": {
						"acc,none": 0.995,
						"acc_stderr,none": 0.0022315868748448817,
						"alias": "  - blimp_determiner_noun_agreement_1"
					},
					"blimp_determiner_noun_agreement_2": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275288,
						"alias": "  - blimp_determiner_noun_agreement_2"
					},
					"blimp_determiner_noun_agreement_irregular_1": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.007335175853706823,
						"alias": "  - blimp_determiner_noun_agreement_irregular_1"
					},
					"blimp_determiner_noun_agreement_irregular_2": {
						"acc,none": 0.944,
						"acc_stderr,none": 0.007274401481697051,
						"alias": "  - blimp_determiner_noun_agreement_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adj_2": {
						"acc,none": 0.928,
						"acc_stderr,none": 0.008178195576218681,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_2"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557421,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_1"
					},
					"blimp_determiner_noun_agreement_with_adj_irregular_2": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524289,
						"alias": "  - blimp_determiner_noun_agreement_with_adj_irregular_2"
					},
					"blimp_determiner_noun_agreement_with_adjective_1": {
						"acc,none": 0.981,
						"acc_stderr,none": 0.004319451082910633,
						"alias": "  - blimp_determiner_noun_agreement_with_adjective_1"
					},
					"blimp_distractor_agreement_relational_noun": {
						"acc,none": 0.873,
						"acc_stderr,none": 0.01053479862085575,
						"alias": "  - blimp_distractor_agreement_relational_noun"
					},
					"blimp_distractor_agreement_relative_clause": {
						"acc,none": 0.749,
						"acc_stderr,none": 0.013718133516888924,
						"alias": "  - blimp_distractor_agreement_relative_clause"
					},
					"blimp_drop_argument": {
						"acc,none": 0.798,
						"acc_stderr,none": 0.01270265158765513,
						"alias": "  - blimp_drop_argument"
					},
					"blimp_ellipsis_n_bar_1": {
						"acc,none": 0.806,
						"acc_stderr,none": 0.01251081614126437,
						"alias": "  - blimp_ellipsis_n_bar_1"
					},
					"blimp_ellipsis_n_bar_2": {
						"acc,none": 0.937,
						"acc_stderr,none": 0.007687007876286436,
						"alias": "  - blimp_ellipsis_n_bar_2"
					},
					"blimp_existential_there_object_raising": {
						"acc,none": 0.858,
						"acc_stderr,none": 0.01104345769937823,
						"alias": "  - blimp_existential_there_object_raising"
					},
					"blimp_existential_there_quantifiers_1": {
						"acc,none": 0.973,
						"acc_stderr,none": 0.005128089049275286,
						"alias": "  - blimp_existential_there_quantifiers_1"
					},
					"blimp_existential_there_quantifiers_2": {
						"acc,none": 0.17,
						"acc_stderr,none": 0.011884495834541662,
						"alias": "  - blimp_existential_there_quantifiers_2"
					},
					"blimp_existential_there_subject_raising": {
						"acc,none": 0.88,
						"acc_stderr,none": 0.010281328012747412,
						"alias": "  - blimp_existential_there_subject_raising"
					},
					"blimp_expletive_it_object_raising": {
						"acc,none": 0.797,
						"acc_stderr,none": 0.012726073744598283,
						"alias": "  - blimp_expletive_it_object_raising"
					},
					"blimp_inchoative": {
						"acc,none": 0.692,
						"acc_stderr,none": 0.014606483127342763,
						"alias": "  - blimp_inchoative"
					},
					"blimp_intransitive": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.011884495834541662,
						"alias": "  - blimp_intransitive"
					},
					"blimp_irregular_past_participle_adjectives": {
						"acc,none": 0.958,
						"acc_stderr,none": 0.006346359293033843,
						"alias": "  - blimp_irregular_past_participle_adjectives"
					},
					"blimp_irregular_past_participle_verbs": {
						"acc,none": 0.896,
						"acc_stderr,none": 0.009658016218524284,
						"alias": "  - blimp_irregular_past_participle_verbs"
					},
					"blimp_irregular_plural_subject_verb_agreement_1": {
						"acc,none": 0.923,
						"acc_stderr,none": 0.008434580140240632,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_1"
					},
					"blimp_irregular_plural_subject_verb_agreement_2": {
						"acc,none": 0.89,
						"acc_stderr,none": 0.00989939381972441,
						"alias": "  - blimp_irregular_plural_subject_verb_agreement_2"
					},
					"blimp_left_branch_island_echo_question": {
						"acc,none": 0.505,
						"acc_stderr,none": 0.01581850894443665,
						"alias": "  - blimp_left_branch_island_echo_question"
					},
					"blimp_left_branch_island_simple_question": {
						"acc,none": 0.791,
						"acc_stderr,none": 0.012864077288499323,
						"alias": "  - blimp_left_branch_island_simple_question"
					},
					"blimp_matrix_question_npi_licensor_present": {
						"acc,none": 0.595,
						"acc_stderr,none": 0.015531136990453043,
						"alias": "  - blimp_matrix_question_npi_licensor_present"
					},
					"blimp_npi_present_1": {
						"acc,none": 0.62,
						"acc_stderr,none": 0.015356947477797582,
						"alias": "  - blimp_npi_present_1"
					},
					"blimp_npi_present_2": {
						"acc,none": 0.675,
						"acc_stderr,none": 0.014818724459095524,
						"alias": "  - blimp_npi_present_2"
					},
					"blimp_only_npi_licensor_present": {
						"acc,none": 0.859,
						"acc_stderr,none": 0.011010914595992438,
						"alias": "  - blimp_only_npi_licensor_present"
					},
					"blimp_only_npi_scope": {
						"acc,none": 0.851,
						"acc_stderr,none": 0.011266140684632156,
						"alias": "  - blimp_only_npi_scope"
					},
					"blimp_passive_1": {
						"acc,none": 0.877,
						"acc_stderr,none": 0.01039129342184988,
						"alias": "  - blimp_passive_1"
					},
					"blimp_passive_2": {
						"acc,none": 0.894,
						"acc_stderr,none": 0.009739551265785134,
						"alias": "  - blimp_passive_2"
					},
					"blimp_principle_A_c_command": {
						"acc,none": 0.814,
						"acc_stderr,none": 0.01231079020841279,
						"alias": "  - blimp_principle_A_c_command"
					},
					"blimp_principle_A_case_1": {
						"acc,none": 1.0,
						"acc_stderr,none": 0.0,
						"alias": "  - blimp_principle_A_case_1"
					},
					"blimp_principle_A_case_2": {
						"acc,none": 0.943,
						"acc_stderr,none": 0.0073351758537068355,
						"alias": "  - blimp_principle_A_case_2"
					},
					"blimp_principle_A_domain_1": {
						"acc,none": 0.993,
						"acc_stderr,none": 0.002637794146243768,
						"alias": "  - blimp_principle_A_domain_1"
					},
					"blimp_principle_A_domain_2": {
						"acc,none": 0.865,
						"acc_stderr,none": 0.010811655372416051,
						"alias": "  - blimp_principle_A_domain_2"
					},
					"blimp_principle_A_domain_3": {
						"acc,none": 0.706,
						"acc_stderr,none": 0.01441429054000822,
						"alias": "  - blimp_principle_A_domain_3"
					},
					"blimp_principle_A_reconstruction": {
						"acc,none": 0.361,
						"acc_stderr,none": 0.015195720118175118,
						"alias": "  - blimp_principle_A_reconstruction"
					},
					"blimp_regular_plural_subject_verb_agreement_1": {
						"acc,none": 0.939,
						"acc_stderr,none": 0.007572076091557418,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_1"
					},
					"blimp_regular_plural_subject_verb_agreement_2": {
						"acc,none": 0.899,
						"acc_stderr,none": 0.009533618929340994,
						"alias": "  - blimp_regular_plural_subject_verb_agreement_2"
					},
					"blimp_sentential_negation_npi_licensor_present": {
						"acc,none": 0.986,
						"acc_stderr,none": 0.0037172325482565925,
						"alias": "  - blimp_sentential_negation_npi_licensor_present"
					},
					"blimp_sentential_negation_npi_scope": {
						"acc,none": 0.765,
						"acc_stderr,none": 0.01341472903024714,
						"alias": "  - blimp_sentential_negation_npi_scope"
					},
					"blimp_sentential_subject_island": {
						"acc,none": 0.399,
						"acc_stderr,none": 0.015493193313162906,
						"alias": "  - blimp_sentential_subject_island"
					},
					"blimp_superlative_quantifiers_1": {
						"acc,none": 0.926,
						"acc_stderr,none": 0.00828206451270417,
						"alias": "  - blimp_superlative_quantifiers_1"
					},
					"blimp_superlative_quantifiers_2": {
						"acc,none": 0.961,
						"acc_stderr,none": 0.0061250727764260975,
						"alias": "  - blimp_superlative_quantifiers_2"
					},
					"blimp_tough_vs_raising_1": {
						"acc,none": 0.656,
						"acc_stderr,none": 0.01502963372440895,
						"alias": "  - blimp_tough_vs_raising_1"
					},
					"blimp_tough_vs_raising_2": {
						"acc,none": 0.85,
						"acc_stderr,none": 0.011297239823409303,
						"alias": "  - blimp_tough_vs_raising_2"
					},
					"blimp_transitive": {
						"acc,none": 0.901,
						"acc_stderr,none": 0.009449248027662725,
						"alias": "  - blimp_transitive"
					},
					"blimp_wh_island": {
						"acc,none": 0.841,
						"acc_stderr,none": 0.011569479368271319,
						"alias": "  - blimp_wh_island"
					},
					"blimp_wh_questions_object_gap": {
						"acc,none": 0.812,
						"acc_stderr,none": 0.01236158601510376,
						"alias": "  - blimp_wh_questions_object_gap"
					},
					"blimp_wh_questions_subject_gap": {
						"acc,none": 0.906,
						"acc_stderr,none": 0.009233052000787726,
						"alias": "  - blimp_wh_questions_subject_gap"
					},
					"blimp_wh_questions_subject_gap_long_distance": {
						"acc,none": 0.904,
						"acc_stderr,none": 0.009320454434783246,
						"alias": "  - blimp_wh_questions_subject_gap_long_distance"
					},
					"blimp_wh_vs_that_no_gap": {
						"acc,none": 0.965,
						"acc_stderr,none": 0.005814534272734937,
						"alias": "  - blimp_wh_vs_that_no_gap"
					},
					"blimp_wh_vs_that_no_gap_long_distance": {
						"acc,none": 0.96,
						"acc_stderr,none": 0.006199874066337073,
						"alias": "  - blimp_wh_vs_that_no_gap_long_distance"
					},
					"blimp_wh_vs_that_with_gap": {
						"acc,none": 0.439,
						"acc_stderr,none": 0.015701131345400774,
						"alias": "  - blimp_wh_vs_that_with_gap"
					},
					"blimp_wh_vs_that_with_gap_long_distance": {
						"acc,none": 0.384,
						"acc_stderr,none": 0.015387682761897064,
						"alias": "  - blimp_wh_vs_that_with_gap_long_distance"
					},
					"boolq": {
						"acc,none": 0.689908256880734,
						"acc_stderr,none": 0.008089716685417728,
						"alias": "boolq"
					},
					"cb": {
						"acc,none": 0.6428571428571429,
						"acc_stderr,none": 0.06460957383809221,
						"alias": "cb",
						"f1,none": 0.4618055555555555,
						"f1_stderr,none": "N/A"
					},
					"ceval-valid": {
						"acc,none": 0.26745913818722133,
						"acc_norm,none": 0.26745913818722133,
						"acc_norm_stderr,none": 0.12340262880518994,
						"acc_stderr,none": 0.12340262880518994,
						"alias": "ceval-valid"
					},
					"ceval-valid_accountant": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.06372446937141221,
						"acc_stderr,none": 0.06372446937141221,
						"alias": " - ceval-valid_accountant"
					},
					"ceval-valid_advanced_mathematics": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_advanced_mathematics"
					},
					"ceval-valid_art_studies": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.07575757575757575,
						"acc_stderr,none": 0.07575757575757575,
						"alias": " - ceval-valid_art_studies"
					},
					"ceval-valid_basic_medicine": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_basic_medicine"
					},
					"ceval-valid_business_administration": {
						"acc,none": 0.36363636363636365,
						"acc_norm,none": 0.36363636363636365,
						"acc_norm_stderr,none": 0.08503766788122595,
						"acc_stderr,none": 0.08503766788122595,
						"alias": " - ceval-valid_business_administration"
					},
					"ceval-valid_chinese_language_and_literature": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0879391124952055,
						"acc_stderr,none": 0.0879391124952055,
						"alias": " - ceval-valid_chinese_language_and_literature"
					},
					"ceval-valid_civil_servant": {
						"acc,none": 0.3191489361702128,
						"acc_norm,none": 0.3191489361702128,
						"acc_norm_stderr,none": 0.0687296045180637,
						"acc_stderr,none": 0.0687296045180637,
						"alias": " - ceval-valid_civil_servant"
					},
					"ceval-valid_clinical_medicine": {
						"acc,none": 0.4090909090909091,
						"acc_norm,none": 0.4090909090909091,
						"acc_norm_stderr,none": 0.10729033533674223,
						"acc_stderr,none": 0.10729033533674223,
						"alias": " - ceval-valid_clinical_medicine"
					},
					"ceval-valid_college_chemistry": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_college_chemistry"
					},
					"ceval-valid_college_economics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.06060606060606063,
						"acc_stderr,none": 0.06060606060606063,
						"alias": " - ceval-valid_college_economics"
					},
					"ceval-valid_college_physics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_college_physics"
					},
					"ceval-valid_college_programming": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_college_programming"
					},
					"ceval-valid_computer_architecture": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_computer_architecture"
					},
					"ceval-valid_computer_network": {
						"acc,none": 0.42105263157894735,
						"acc_norm,none": 0.42105263157894735,
						"acc_norm_stderr,none": 0.11637279966159299,
						"acc_stderr,none": 0.11637279966159299,
						"alias": " - ceval-valid_computer_network"
					},
					"ceval-valid_discrete_mathematics": {
						"acc,none": 0.1875,
						"acc_norm,none": 0.1875,
						"acc_norm_stderr,none": 0.10077822185373188,
						"acc_stderr,none": 0.10077822185373188,
						"alias": " - ceval-valid_discrete_mathematics"
					},
					"ceval-valid_education_science": {
						"acc,none": 0.13793103448275862,
						"acc_norm,none": 0.13793103448275862,
						"acc_norm_stderr,none": 0.06516628844986677,
						"acc_stderr,none": 0.06516628844986677,
						"alias": " - ceval-valid_education_science"
					},
					"ceval-valid_electrical_engineer": {
						"acc,none": 0.24324324324324326,
						"acc_norm,none": 0.24324324324324326,
						"acc_norm_stderr,none": 0.07150679219093488,
						"acc_stderr,none": 0.07150679219093488,
						"alias": " - ceval-valid_electrical_engineer"
					},
					"ceval-valid_environmental_impact_assessment_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_environmental_impact_assessment_engineer"
					},
					"ceval-valid_fire_engineer": {
						"acc,none": 0.16129032258064516,
						"acc_norm,none": 0.16129032258064516,
						"acc_norm_stderr,none": 0.06715051611181073,
						"acc_stderr,none": 0.06715051611181073,
						"alias": " - ceval-valid_fire_engineer"
					},
					"ceval-valid_high_school_biology": {
						"acc,none": 0.3684210526315789,
						"acc_norm,none": 0.3684210526315789,
						"acc_norm_stderr,none": 0.1136972052352256,
						"acc_stderr,none": 0.1136972052352256,
						"alias": " - ceval-valid_high_school_biology"
					},
					"ceval-valid_high_school_chemistry": {
						"acc,none": 0.15789473684210525,
						"acc_norm,none": 0.15789473684210525,
						"acc_norm_stderr,none": 0.08594700851870798,
						"acc_stderr,none": 0.08594700851870798,
						"alias": " - ceval-valid_high_school_chemistry"
					},
					"ceval-valid_high_school_chinese": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_high_school_chinese"
					},
					"ceval-valid_high_school_geography": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.09609167675529229,
						"acc_stderr,none": 0.09609167675529229,
						"alias": " - ceval-valid_high_school_geography"
					},
					"ceval-valid_high_school_history": {
						"acc,none": 0.4,
						"acc_norm,none": 0.4,
						"acc_norm_stderr,none": 0.11239029738980327,
						"acc_stderr,none": 0.11239029738980327,
						"alias": " - ceval-valid_high_school_history"
					},
					"ceval-valid_high_school_mathematics": {
						"acc,none": 0.2777777777777778,
						"acc_norm,none": 0.2777777777777778,
						"acc_norm_stderr,none": 0.1086324845659782,
						"acc_stderr,none": 0.1086324845659782,
						"alias": " - ceval-valid_high_school_mathematics"
					},
					"ceval-valid_high_school_physics": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295433,
						"acc_stderr,none": 0.10956136839295433,
						"alias": " - ceval-valid_high_school_physics"
					},
					"ceval-valid_high_school_politics": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.11768778828946262,
						"acc_stderr,none": 0.11768778828946262,
						"alias": " - ceval-valid_high_school_politics"
					},
					"ceval-valid_ideological_and_moral_cultivation": {
						"acc,none": 0.2631578947368421,
						"acc_norm,none": 0.2631578947368421,
						"acc_norm_stderr,none": 0.10379087338771256,
						"acc_stderr,none": 0.10379087338771256,
						"alias": " - ceval-valid_ideological_and_moral_cultivation"
					},
					"ceval-valid_law": {
						"acc,none": 0.3333333333333333,
						"acc_norm,none": 0.3333333333333333,
						"acc_norm_stderr,none": 0.0982946374365981,
						"acc_stderr,none": 0.0982946374365981,
						"alias": " - ceval-valid_law"
					},
					"ceval-valid_legal_professional": {
						"acc,none": 0.30434782608695654,
						"acc_norm,none": 0.30434782608695654,
						"acc_norm_stderr,none": 0.09810018692482896,
						"acc_stderr,none": 0.09810018692482896,
						"alias": " - ceval-valid_legal_professional"
					},
					"ceval-valid_logic": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_logic"
					},
					"ceval-valid_mao_zedong_thought": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.09028938981432691,
						"acc_stderr,none": 0.09028938981432691,
						"alias": " - ceval-valid_mao_zedong_thought"
					},
					"ceval-valid_marxism": {
						"acc,none": 0.3157894736842105,
						"acc_norm,none": 0.3157894736842105,
						"acc_norm_stderr,none": 0.10956136839295434,
						"acc_stderr,none": 0.10956136839295434,
						"alias": " - ceval-valid_marxism"
					},
					"ceval-valid_metrology_engineer": {
						"acc,none": 0.2916666666666667,
						"acc_norm,none": 0.2916666666666667,
						"acc_norm_stderr,none": 0.09477598811252413,
						"acc_stderr,none": 0.09477598811252413,
						"alias": " - ceval-valid_metrology_engineer"
					},
					"ceval-valid_middle_school_biology": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_biology"
					},
					"ceval-valid_middle_school_chemistry": {
						"acc,none": 0.1,
						"acc_norm,none": 0.1,
						"acc_norm_stderr,none": 0.06882472016116853,
						"acc_stderr,none": 0.06882472016116853,
						"alias": " - ceval-valid_middle_school_chemistry"
					},
					"ceval-valid_middle_school_geography": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.1305582419667734,
						"acc_stderr,none": 0.1305582419667734,
						"alias": " - ceval-valid_middle_school_geography"
					},
					"ceval-valid_middle_school_history": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.09718590614997252,
						"acc_stderr,none": 0.09718590614997252,
						"alias": " - ceval-valid_middle_school_history"
					},
					"ceval-valid_middle_school_mathematics": {
						"acc,none": 0.10526315789473684,
						"acc_norm,none": 0.10526315789473684,
						"acc_norm_stderr,none": 0.07233518641434492,
						"acc_stderr,none": 0.07233518641434492,
						"alias": " - ceval-valid_middle_school_mathematics"
					},
					"ceval-valid_middle_school_physics": {
						"acc,none": 0.631578947368421,
						"acc_norm,none": 0.631578947368421,
						"acc_norm_stderr,none": 0.11369720523522561,
						"acc_stderr,none": 0.11369720523522561,
						"alias": " - ceval-valid_middle_school_physics"
					},
					"ceval-valid_middle_school_politics": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.10101525445522108,
						"acc_stderr,none": 0.10101525445522108,
						"alias": " - ceval-valid_middle_school_politics"
					},
					"ceval-valid_modern_chinese_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.08793911249520549,
						"acc_stderr,none": 0.08793911249520549,
						"alias": " - ceval-valid_modern_chinese_history"
					},
					"ceval-valid_operating_system": {
						"acc,none": 0.21052631578947367,
						"acc_norm,none": 0.21052631578947367,
						"acc_norm_stderr,none": 0.0960916767552923,
						"acc_stderr,none": 0.0960916767552923,
						"alias": " - ceval-valid_operating_system"
					},
					"ceval-valid_physician": {
						"acc,none": 0.1836734693877551,
						"acc_norm,none": 0.1836734693877551,
						"acc_norm_stderr,none": 0.055890056888282254,
						"acc_stderr,none": 0.055890056888282254,
						"alias": " - ceval-valid_physician"
					},
					"ceval-valid_plant_protection": {
						"acc,none": 0.18181818181818182,
						"acc_norm,none": 0.18181818181818182,
						"acc_norm_stderr,none": 0.08416546361568647,
						"acc_stderr,none": 0.08416546361568647,
						"alias": " - ceval-valid_plant_protection"
					},
					"ceval-valid_probability_and_statistics": {
						"acc,none": 0.3888888888888889,
						"acc_norm,none": 0.3888888888888889,
						"acc_norm_stderr,none": 0.11823563735376173,
						"acc_stderr,none": 0.11823563735376173,
						"alias": " - ceval-valid_probability_and_statistics"
					},
					"ceval-valid_professional_tour_guide": {
						"acc,none": 0.3448275862068966,
						"acc_norm,none": 0.3448275862068966,
						"acc_norm_stderr,none": 0.08982552969857371,
						"acc_stderr,none": 0.08982552969857371,
						"alias": " - ceval-valid_professional_tour_guide"
					},
					"ceval-valid_sports_science": {
						"acc,none": 0.47368421052631576,
						"acc_norm,none": 0.47368421052631576,
						"acc_norm_stderr,none": 0.1176877882894626,
						"acc_stderr,none": 0.1176877882894626,
						"alias": " - ceval-valid_sports_science"
					},
					"ceval-valid_tax_accountant": {
						"acc,none": 0.16326530612244897,
						"acc_norm,none": 0.16326530612244897,
						"acc_norm_stderr,none": 0.05334825558285076,
						"acc_stderr,none": 0.05334825558285076,
						"alias": " - ceval-valid_tax_accountant"
					},
					"ceval-valid_teacher_qualification": {
						"acc,none": 0.29545454545454547,
						"acc_norm,none": 0.29545454545454547,
						"acc_norm_stderr,none": 0.06957698714453993,
						"acc_stderr,none": 0.06957698714453993,
						"alias": " - ceval-valid_teacher_qualification"
					},
					"ceval-valid_urban_and_rural_planner": {
						"acc,none": 0.1956521739130435,
						"acc_norm,none": 0.1956521739130435,
						"acc_norm_stderr,none": 0.05913682829884973,
						"acc_stderr,none": 0.05913682829884973,
						"alias": " - ceval-valid_urban_and_rural_planner"
					},
					"ceval-valid_veterinary_medicine": {
						"acc,none": 0.2608695652173913,
						"acc_norm,none": 0.2608695652173913,
						"acc_norm_stderr,none": 0.09361833424764439,
						"acc_stderr,none": 0.09361833424764439,
						"alias": " - ceval-valid_veterinary_medicine"
					},
					"cmmlu": {
						"acc,none": 0.2572094629597652,
						"acc_norm,none": 0.2572094629597652,
						"acc_norm_stderr,none": 0.04141957805238283,
						"acc_stderr,none": 0.04141957805238283,
						"alias": "cmmlu"
					},
					"cmmlu_agronomy": {
						"acc,none": 0.27218934911242604,
						"acc_norm,none": 0.27218934911242604,
						"acc_norm_stderr,none": 0.03433919627548534,
						"acc_stderr,none": 0.03433919627548534,
						"alias": " - cmmlu_agronomy"
					},
					"cmmlu_anatomy": {
						"acc,none": 0.25675675675675674,
						"acc_norm,none": 0.25675675675675674,
						"acc_norm_stderr,none": 0.036030290036472144,
						"acc_stderr,none": 0.036030290036472144,
						"alias": " - cmmlu_anatomy"
					},
					"cmmlu_ancient_chinese": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.03470398212814534,
						"acc_stderr,none": 0.03470398212814534,
						"alias": " - cmmlu_ancient_chinese"
					},
					"cmmlu_arts": {
						"acc,none": 0.2875,
						"acc_norm,none": 0.2875,
						"acc_norm_stderr,none": 0.035893251060583956,
						"acc_stderr,none": 0.035893251060583956,
						"alias": " - cmmlu_arts"
					},
					"cmmlu_astronomy": {
						"acc,none": 0.296969696969697,
						"acc_norm,none": 0.296969696969697,
						"acc_norm_stderr,none": 0.03567969772268049,
						"acc_stderr,none": 0.03567969772268049,
						"alias": " - cmmlu_astronomy"
					},
					"cmmlu_business_ethics": {
						"acc,none": 0.2822966507177033,
						"acc_norm,none": 0.2822966507177033,
						"acc_norm_stderr,none": 0.031209993754410453,
						"acc_stderr,none": 0.031209993754410453,
						"alias": " - cmmlu_business_ethics"
					},
					"cmmlu_chinese_civil_service_exam": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_chinese_civil_service_exam"
					},
					"cmmlu_chinese_driving_rule": {
						"acc,none": 0.3053435114503817,
						"acc_norm,none": 0.3053435114503817,
						"acc_norm_stderr,none": 0.04039314978724561,
						"acc_stderr,none": 0.04039314978724561,
						"alias": " - cmmlu_chinese_driving_rule"
					},
					"cmmlu_chinese_food_culture": {
						"acc,none": 0.22794117647058823,
						"acc_norm,none": 0.22794117647058823,
						"acc_norm_stderr,none": 0.03610519574180446,
						"acc_stderr,none": 0.03610519574180446,
						"alias": " - cmmlu_chinese_food_culture"
					},
					"cmmlu_chinese_foreign_policy": {
						"acc,none": 0.2523364485981308,
						"acc_norm,none": 0.2523364485981308,
						"acc_norm_stderr,none": 0.04218811928205305,
						"acc_stderr,none": 0.04218811928205305,
						"alias": " - cmmlu_chinese_foreign_policy"
					},
					"cmmlu_chinese_history": {
						"acc,none": 0.24458204334365324,
						"acc_norm,none": 0.24458204334365324,
						"acc_norm_stderr,none": 0.02395399754093218,
						"acc_stderr,none": 0.02395399754093218,
						"alias": " - cmmlu_chinese_history"
					},
					"cmmlu_chinese_literature": {
						"acc,none": 0.3088235294117647,
						"acc_norm,none": 0.3088235294117647,
						"acc_norm_stderr,none": 0.03242661719827218,
						"acc_stderr,none": 0.03242661719827218,
						"alias": " - cmmlu_chinese_literature"
					},
					"cmmlu_chinese_teacher_qualification": {
						"acc,none": 0.22346368715083798,
						"acc_norm,none": 0.22346368715083798,
						"acc_norm_stderr,none": 0.03122298091957976,
						"acc_stderr,none": 0.03122298091957976,
						"alias": " - cmmlu_chinese_teacher_qualification"
					},
					"cmmlu_clinical_knowledge": {
						"acc,none": 0.25738396624472576,
						"acc_norm,none": 0.25738396624472576,
						"acc_norm_stderr,none": 0.0284588209914603,
						"acc_stderr,none": 0.0284588209914603,
						"alias": " - cmmlu_clinical_knowledge"
					},
					"cmmlu_college_actuarial_science": {
						"acc,none": 0.25471698113207547,
						"acc_norm,none": 0.25471698113207547,
						"acc_norm_stderr,none": 0.042520162237633115,
						"acc_stderr,none": 0.042520162237633115,
						"alias": " - cmmlu_college_actuarial_science"
					},
					"cmmlu_college_education": {
						"acc,none": 0.32710280373831774,
						"acc_norm,none": 0.32710280373831774,
						"acc_norm_stderr,none": 0.04556837693674772,
						"acc_stderr,none": 0.04556837693674772,
						"alias": " - cmmlu_college_education"
					},
					"cmmlu_college_engineering_hydrology": {
						"acc,none": 0.2358490566037736,
						"acc_norm,none": 0.2358490566037736,
						"acc_norm_stderr,none": 0.04142972007800376,
						"acc_stderr,none": 0.04142972007800376,
						"alias": " - cmmlu_college_engineering_hydrology"
					},
					"cmmlu_college_law": {
						"acc,none": 0.19444444444444445,
						"acc_norm,none": 0.19444444444444445,
						"acc_norm_stderr,none": 0.03826076324884864,
						"acc_stderr,none": 0.03826076324884864,
						"alias": " - cmmlu_college_law"
					},
					"cmmlu_college_mathematics": {
						"acc,none": 0.19047619047619047,
						"acc_norm,none": 0.19047619047619047,
						"acc_norm_stderr,none": 0.038505120955363834,
						"acc_stderr,none": 0.038505120955363834,
						"alias": " - cmmlu_college_mathematics"
					},
					"cmmlu_college_medical_statistics": {
						"acc,none": 0.22641509433962265,
						"acc_norm,none": 0.22641509433962265,
						"acc_norm_stderr,none": 0.040842473153370994,
						"acc_stderr,none": 0.040842473153370994,
						"alias": " - cmmlu_college_medical_statistics"
					},
					"cmmlu_college_medicine": {
						"acc,none": 0.2271062271062271,
						"acc_norm,none": 0.2271062271062271,
						"acc_norm_stderr,none": 0.025403290424595156,
						"acc_stderr,none": 0.025403290424595156,
						"alias": " - cmmlu_college_medicine"
					},
					"cmmlu_computer_science": {
						"acc,none": 0.2107843137254902,
						"acc_norm,none": 0.2107843137254902,
						"acc_norm_stderr,none": 0.028626547912437395,
						"acc_stderr,none": 0.028626547912437395,
						"alias": " - cmmlu_computer_science"
					},
					"cmmlu_computer_security": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.03188578017686398,
						"acc_stderr,none": 0.03188578017686398,
						"alias": " - cmmlu_computer_security"
					},
					"cmmlu_conceptual_physics": {
						"acc,none": 0.2653061224489796,
						"acc_norm,none": 0.2653061224489796,
						"acc_norm_stderr,none": 0.03653847510896055,
						"acc_stderr,none": 0.03653847510896055,
						"alias": " - cmmlu_conceptual_physics"
					},
					"cmmlu_construction_project_management": {
						"acc,none": 0.26618705035971224,
						"acc_norm,none": 0.26618705035971224,
						"acc_norm_stderr,none": 0.03762240935089088,
						"acc_stderr,none": 0.03762240935089088,
						"alias": " - cmmlu_construction_project_management"
					},
					"cmmlu_economics": {
						"acc,none": 0.27044025157232704,
						"acc_norm,none": 0.27044025157232704,
						"acc_norm_stderr,none": 0.03533764101912228,
						"acc_stderr,none": 0.03533764101912228,
						"alias": " - cmmlu_economics"
					},
					"cmmlu_education": {
						"acc,none": 0.22699386503067484,
						"acc_norm,none": 0.22699386503067484,
						"acc_norm_stderr,none": 0.03291099578615767,
						"acc_stderr,none": 0.03291099578615767,
						"alias": " - cmmlu_education"
					},
					"cmmlu_electrical_engineering": {
						"acc,none": 0.27325581395348836,
						"acc_norm,none": 0.27325581395348836,
						"acc_norm_stderr,none": 0.03407826167337437,
						"acc_stderr,none": 0.03407826167337437,
						"alias": " - cmmlu_electrical_engineering"
					},
					"cmmlu_elementary_chinese": {
						"acc,none": 0.25793650793650796,
						"acc_norm,none": 0.25793650793650796,
						"acc_norm_stderr,none": 0.02761468413941454,
						"acc_stderr,none": 0.02761468413941454,
						"alias": " - cmmlu_elementary_chinese"
					},
					"cmmlu_elementary_commonsense": {
						"acc,none": 0.24242424242424243,
						"acc_norm,none": 0.24242424242424243,
						"acc_norm_stderr,none": 0.03053289223393205,
						"acc_stderr,none": 0.03053289223393205,
						"alias": " - cmmlu_elementary_commonsense"
					},
					"cmmlu_elementary_information_and_technology": {
						"acc,none": 0.2815126050420168,
						"acc_norm,none": 0.2815126050420168,
						"acc_norm_stderr,none": 0.029213549414372163,
						"acc_stderr,none": 0.029213549414372163,
						"alias": " - cmmlu_elementary_information_and_technology"
					},
					"cmmlu_elementary_mathematics": {
						"acc,none": 0.20869565217391303,
						"acc_norm,none": 0.20869565217391303,
						"acc_norm_stderr,none": 0.02685410826543965,
						"acc_stderr,none": 0.02685410826543965,
						"alias": " - cmmlu_elementary_mathematics"
					},
					"cmmlu_ethnology": {
						"acc,none": 0.2740740740740741,
						"acc_norm,none": 0.2740740740740741,
						"acc_norm_stderr,none": 0.03853254836552004,
						"acc_stderr,none": 0.03853254836552004,
						"alias": " - cmmlu_ethnology"
					},
					"cmmlu_food_science": {
						"acc,none": 0.2867132867132867,
						"acc_norm,none": 0.2867132867132867,
						"acc_norm_stderr,none": 0.03795000212801782,
						"acc_stderr,none": 0.03795000212801782,
						"alias": " - cmmlu_food_science"
					},
					"cmmlu_genetics": {
						"acc,none": 0.23863636363636365,
						"acc_norm,none": 0.23863636363636365,
						"acc_norm_stderr,none": 0.03222147017899509,
						"acc_stderr,none": 0.03222147017899509,
						"alias": " - cmmlu_genetics"
					},
					"cmmlu_global_facts": {
						"acc,none": 0.24161073825503357,
						"acc_norm,none": 0.24161073825503357,
						"acc_norm_stderr,none": 0.03518627932594347,
						"acc_stderr,none": 0.03518627932594347,
						"alias": " - cmmlu_global_facts"
					},
					"cmmlu_high_school_biology": {
						"acc,none": 0.2603550295857988,
						"acc_norm,none": 0.2603550295857988,
						"acc_norm_stderr,none": 0.03385633936516737,
						"acc_stderr,none": 0.03385633936516737,
						"alias": " - cmmlu_high_school_biology"
					},
					"cmmlu_high_school_chemistry": {
						"acc,none": 0.23484848484848486,
						"acc_norm,none": 0.23484848484848486,
						"acc_norm_stderr,none": 0.03703667194552485,
						"acc_stderr,none": 0.03703667194552485,
						"alias": " - cmmlu_high_school_chemistry"
					},
					"cmmlu_high_school_geography": {
						"acc,none": 0.23728813559322035,
						"acc_norm,none": 0.23728813559322035,
						"acc_norm_stderr,none": 0.03933012549934383,
						"acc_stderr,none": 0.03933012549934383,
						"alias": " - cmmlu_high_school_geography"
					},
					"cmmlu_high_school_mathematics": {
						"acc,none": 0.23170731707317074,
						"acc_norm,none": 0.23170731707317074,
						"acc_norm_stderr,none": 0.03304756158810785,
						"acc_stderr,none": 0.03304756158810785,
						"alias": " - cmmlu_high_school_mathematics"
					},
					"cmmlu_high_school_physics": {
						"acc,none": 0.2636363636363636,
						"acc_norm,none": 0.2636363636363636,
						"acc_norm_stderr,none": 0.04220224692971987,
						"acc_stderr,none": 0.04220224692971987,
						"alias": " - cmmlu_high_school_physics"
					},
					"cmmlu_high_school_politics": {
						"acc,none": 0.2727272727272727,
						"acc_norm,none": 0.2727272727272727,
						"acc_norm_stderr,none": 0.03737392962695623,
						"acc_stderr,none": 0.03737392962695623,
						"alias": " - cmmlu_high_school_politics"
					},
					"cmmlu_human_sexuality": {
						"acc,none": 0.2222222222222222,
						"acc_norm,none": 0.2222222222222222,
						"acc_norm_stderr,none": 0.037184890068181146,
						"acc_stderr,none": 0.037184890068181146,
						"alias": " - cmmlu_human_sexuality"
					},
					"cmmlu_international_law": {
						"acc,none": 0.24864864864864866,
						"acc_norm,none": 0.24864864864864866,
						"acc_norm_stderr,none": 0.031864394925815165,
						"acc_stderr,none": 0.031864394925815165,
						"alias": " - cmmlu_international_law"
					},
					"cmmlu_journalism": {
						"acc,none": 0.2558139534883721,
						"acc_norm,none": 0.2558139534883721,
						"acc_norm_stderr,none": 0.03336605189761062,
						"acc_stderr,none": 0.03336605189761062,
						"alias": " - cmmlu_journalism"
					},
					"cmmlu_jurisprudence": {
						"acc,none": 0.26520681265206814,
						"acc_norm,none": 0.26520681265206814,
						"acc_norm_stderr,none": 0.02180132906974519,
						"acc_stderr,none": 0.02180132906974519,
						"alias": " - cmmlu_jurisprudence"
					},
					"cmmlu_legal_and_moral_basis": {
						"acc,none": 0.29439252336448596,
						"acc_norm,none": 0.29439252336448596,
						"acc_norm_stderr,none": 0.031228791154249903,
						"acc_stderr,none": 0.031228791154249903,
						"alias": " - cmmlu_legal_and_moral_basis"
					},
					"cmmlu_logical": {
						"acc,none": 0.2682926829268293,
						"acc_norm,none": 0.2682926829268293,
						"acc_norm_stderr,none": 0.040113743936211456,
						"acc_stderr,none": 0.040113743936211456,
						"alias": " - cmmlu_logical"
					},
					"cmmlu_machine_learning": {
						"acc,none": 0.26229508196721313,
						"acc_norm,none": 0.26229508196721313,
						"acc_norm_stderr,none": 0.03998929318926593,
						"acc_stderr,none": 0.03998929318926593,
						"alias": " - cmmlu_machine_learning"
					},
					"cmmlu_management": {
						"acc,none": 0.2761904761904762,
						"acc_norm,none": 0.2761904761904762,
						"acc_norm_stderr,none": 0.03092739584327576,
						"acc_stderr,none": 0.03092739584327576,
						"alias": " - cmmlu_management"
					},
					"cmmlu_marketing": {
						"acc,none": 0.24444444444444444,
						"acc_norm,none": 0.24444444444444444,
						"acc_norm_stderr,none": 0.03212157057535213,
						"acc_stderr,none": 0.03212157057535213,
						"alias": " - cmmlu_marketing"
					},
					"cmmlu_marxist_theory": {
						"acc,none": 0.2751322751322751,
						"acc_norm,none": 0.2751322751322751,
						"acc_norm_stderr,none": 0.032570260086303135,
						"acc_stderr,none": 0.032570260086303135,
						"alias": " - cmmlu_marxist_theory"
					},
					"cmmlu_modern_chinese": {
						"acc,none": 0.25862068965517243,
						"acc_norm,none": 0.25862068965517243,
						"acc_norm_stderr,none": 0.040832215386495764,
						"acc_stderr,none": 0.040832215386495764,
						"alias": " - cmmlu_modern_chinese"
					},
					"cmmlu_nutrition": {
						"acc,none": 0.22758620689655173,
						"acc_norm,none": 0.22758620689655173,
						"acc_norm_stderr,none": 0.03493950380131184,
						"acc_stderr,none": 0.03493950380131184,
						"alias": " - cmmlu_nutrition"
					},
					"cmmlu_philosophy": {
						"acc,none": 0.2857142857142857,
						"acc_norm,none": 0.2857142857142857,
						"acc_norm_stderr,none": 0.044298119496145844,
						"acc_stderr,none": 0.044298119496145844,
						"alias": " - cmmlu_philosophy"
					},
					"cmmlu_professional_accounting": {
						"acc,none": 0.28,
						"acc_norm,none": 0.28,
						"acc_norm_stderr,none": 0.03403851773587051,
						"acc_stderr,none": 0.03403851773587051,
						"alias": " - cmmlu_professional_accounting"
					},
					"cmmlu_professional_law": {
						"acc,none": 0.27488151658767773,
						"acc_norm,none": 0.27488151658767773,
						"acc_norm_stderr,none": 0.030808291124780323,
						"acc_stderr,none": 0.030808291124780323,
						"alias": " - cmmlu_professional_law"
					},
					"cmmlu_professional_medicine": {
						"acc,none": 0.26063829787234044,
						"acc_norm,none": 0.26063829787234044,
						"acc_norm_stderr,none": 0.022668978836259786,
						"acc_stderr,none": 0.022668978836259786,
						"alias": " - cmmlu_professional_medicine"
					},
					"cmmlu_professional_psychology": {
						"acc,none": 0.2629310344827586,
						"acc_norm,none": 0.2629310344827586,
						"acc_norm_stderr,none": 0.02896469754454016,
						"acc_stderr,none": 0.02896469754454016,
						"alias": " - cmmlu_professional_psychology"
					},
					"cmmlu_public_relations": {
						"acc,none": 0.26436781609195403,
						"acc_norm,none": 0.26436781609195403,
						"acc_norm_stderr,none": 0.03352830517660786,
						"acc_stderr,none": 0.03352830517660786,
						"alias": " - cmmlu_public_relations"
					},
					"cmmlu_security_study": {
						"acc,none": 0.23703703703703705,
						"acc_norm,none": 0.23703703703703705,
						"acc_norm_stderr,none": 0.03673731683969506,
						"acc_stderr,none": 0.03673731683969506,
						"alias": " - cmmlu_security_study"
					},
					"cmmlu_sociology": {
						"acc,none": 0.252212389380531,
						"acc_norm,none": 0.252212389380531,
						"acc_norm_stderr,none": 0.028952167450890794,
						"acc_stderr,none": 0.028952167450890794,
						"alias": " - cmmlu_sociology"
					},
					"cmmlu_sports_science": {
						"acc,none": 0.2909090909090909,
						"acc_norm,none": 0.2909090909090909,
						"acc_norm_stderr,none": 0.03546563019624337,
						"acc_stderr,none": 0.03546563019624337,
						"alias": " - cmmlu_sports_science"
					},
					"cmmlu_traditional_chinese_medicine": {
						"acc,none": 0.2594594594594595,
						"acc_norm,none": 0.2594594594594595,
						"acc_norm_stderr,none": 0.03231470996617757,
						"acc_stderr,none": 0.03231470996617757,
						"alias": " - cmmlu_traditional_chinese_medicine"
					},
					"cmmlu_virology": {
						"acc,none": 0.21893491124260356,
						"acc_norm,none": 0.21893491124260356,
						"acc_norm_stderr,none": 0.03190409884491233,
						"acc_stderr,none": 0.03190409884491233,
						"alias": " - cmmlu_virology"
					},
					"cmmlu_world_history": {
						"acc,none": 0.21739130434782608,
						"acc_norm,none": 0.21739130434782608,
						"acc_norm_stderr,none": 0.0326086956521739,
						"acc_stderr,none": 0.0326086956521739,
						"alias": " - cmmlu_world_history"
					},
					"cmmlu_world_religions": {
						"acc,none": 0.29375,
						"acc_norm,none": 0.29375,
						"acc_norm_stderr,none": 0.036121818481912725,
						"acc_stderr,none": 0.036121818481912725,
						"alias": " - cmmlu_world_religions"
					},
					"cola": {
						"alias": " - cola",
						"mcc,none": 0.08057923504646884,
						"mcc_stderr,none": 0.03147882755849001
					},
					"copa": {
						"acc,none": 0.83,
						"acc_stderr,none": 0.0377525168068637,
						"alias": "copa"
					},
					"crows_pairs": {
						"alias": "crows_pairs",
						"likelihood_diff,none": 3.590777802623733,
						"likelihood_diff_stderr,none": 0.4303348750203673,
						"pct_stereotype,none": 0.6197078115682767,
						"pct_stereotype_stderr,none": 0.09384099725563327
					},
					"crows_pairs_english": {
						"alias": " - crows_pairs_english",
						"likelihood_diff,none": 3.6496720333929638,
						"likelihood_diff_stderr,none": 0.08169023239217012,
						"pct_stereotype,none": 0.7078115682766846,
						"pct_stereotype_stderr,none": 0.011108446551136946
					},
					"crows_pairs_english_age": {
						"alias": " - crows_pairs_english_age",
						"likelihood_diff,none": 3.9903846153846154,
						"likelihood_diff_stderr,none": 0.35356003190199864,
						"pct_stereotype,none": 0.7692307692307693,
						"pct_stereotype_stderr,none": 0.04441155916843277
					},
					"crows_pairs_english_autre": {
						"alias": " - crows_pairs_english_autre",
						"likelihood_diff,none": 4.738636363636363,
						"likelihood_diff_stderr,none": 1.6300145134475297,
						"pct_stereotype,none": 0.8181818181818182,
						"pct_stereotype_stderr,none": 0.12196734422726124
					},
					"crows_pairs_english_disability": {
						"alias": " - crows_pairs_english_disability",
						"likelihood_diff,none": 6.184615384615385,
						"likelihood_diff_stderr,none": 0.61208475236918,
						"pct_stereotype,none": 0.7538461538461538,
						"pct_stereotype_stderr,none": 0.05384615384615383
					},
					"crows_pairs_english_gender": {
						"alias": " - crows_pairs_english_gender",
						"likelihood_diff,none": 2.718359375,
						"likelihood_diff_stderr,none": 0.14408805023094778,
						"pct_stereotype,none": 0.7125,
						"pct_stereotype_stderr,none": 0.025340548867928987
					},
					"crows_pairs_english_nationality": {
						"alias": " - crows_pairs_english_nationality",
						"likelihood_diff,none": 3.720486111111111,
						"likelihood_diff_stderr,none": 0.21924505500029637,
						"pct_stereotype,none": 0.6435185185185185,
						"pct_stereotype_stderr,none": 0.032664783315272714
					},
					"crows_pairs_english_physical_appearance": {
						"alias": " - crows_pairs_english_physical_appearance",
						"likelihood_diff,none": 4.192708333333333,
						"likelihood_diff_stderr,none": 0.37634196422479993,
						"pct_stereotype,none": 0.8055555555555556,
						"pct_stereotype_stderr,none": 0.046969543993374836
					},
					"crows_pairs_english_race_color": {
						"alias": " - crows_pairs_english_race_color",
						"likelihood_diff,none": 3.4015748031496065,
						"likelihood_diff_stderr,none": 0.13526660628938839,
						"pct_stereotype,none": 0.6318897637795275,
						"pct_stereotype_stderr,none": 0.021419317453594672
					},
					"crows_pairs_english_religion": {
						"alias": " - crows_pairs_english_religion",
						"likelihood_diff,none": 3.5563063063063063,
						"likelihood_diff_stderr,none": 0.3192353150430663,
						"pct_stereotype,none": 0.8108108108108109,
						"pct_stereotype_stderr,none": 0.03734320430852741
					},
					"crows_pairs_english_sexual_orientation": {
						"alias": " - crows_pairs_english_sexual_orientation",
						"likelihood_diff,none": 4.389784946236559,
						"likelihood_diff_stderr,none": 0.43138106218942457,
						"pct_stereotype,none": 0.9032258064516129,
						"pct_stereotype_stderr,none": 0.03082364793244869
					},
					"crows_pairs_english_socioeconomic": {
						"alias": " - crows_pairs_english_socioeconomic",
						"likelihood_diff,none": 4.16578947368421,
						"likelihood_diff_stderr,none": 0.26327846374722325,
						"pct_stereotype,none": 0.7157894736842105,
						"pct_stereotype_stderr,none": 0.032808156735746566
					},
					"crows_pairs_french": {
						"alias": " - crows_pairs_french",
						"likelihood_diff,none": 3.529479725700656,
						"likelihood_diff_stderr,none": 0.08514733819590228,
						"pct_stereotype,none": 0.5324985092426953,
						"pct_stereotype_stderr,none": 0.012187473686331197
					},
					"crows_pairs_french_age": {
						"alias": " - crows_pairs_french_age",
						"likelihood_diff,none": 3.2416666666666667,
						"likelihood_diff_stderr,none": 0.2835879556491485,
						"pct_stereotype,none": 0.4888888888888889,
						"pct_stereotype_stderr,none": 0.05298680599073449
					},
					"crows_pairs_french_autre": {
						"alias": " - crows_pairs_french_autre",
						"likelihood_diff,none": 4.0,
						"likelihood_diff_stderr,none": 1.6211739968939571,
						"pct_stereotype,none": 0.5384615384615384,
						"pct_stereotype_stderr,none": 0.14390989949130545
					},
					"crows_pairs_french_disability": {
						"alias": " - crows_pairs_french_disability",
						"likelihood_diff,none": 4.950757575757576,
						"likelihood_diff_stderr,none": 0.5607141346718444,
						"pct_stereotype,none": 0.6515151515151515,
						"pct_stereotype_stderr,none": 0.059101367791192905
					},
					"crows_pairs_french_gender": {
						"alias": " - crows_pairs_french_gender",
						"likelihood_diff,none": 3.079828660436137,
						"likelihood_diff_stderr,none": 0.1637036811014212,
						"pct_stereotype,none": 0.5171339563862928,
						"pct_stereotype_stderr,none": 0.027934433698537306
					},
					"crows_pairs_french_nationality": {
						"alias": " - crows_pairs_french_nationality",
						"likelihood_diff,none": 3.722332015810277,
						"likelihood_diff_stderr,none": 0.20954241934562534,
						"pct_stereotype,none": 0.38735177865612647,
						"pct_stereotype_stderr,none": 0.030687258758503675
					},
					"crows_pairs_french_physical_appearance": {
						"alias": " - crows_pairs_french_physical_appearance",
						"likelihood_diff,none": 3.375,
						"likelihood_diff_stderr,none": 0.4225233925055482,
						"pct_stereotype,none": 0.5555555555555556,
						"pct_stereotype_stderr,none": 0.05897165471491952
					},
					"crows_pairs_french_race_color": {
						"alias": " - crows_pairs_french_race_color",
						"likelihood_diff,none": 3.276086956521739,
						"likelihood_diff_stderr,none": 0.1609021872460863,
						"pct_stereotype,none": 0.4434782608695652,
						"pct_stereotype_stderr,none": 0.023188405797101477
					},
					"crows_pairs_french_religion": {
						"alias": " - crows_pairs_french_religion",
						"likelihood_diff,none": 3.4239130434782608,
						"likelihood_diff_stderr,none": 0.346899390045465,
						"pct_stereotype,none": 0.7478260869565218,
						"pct_stereotype_stderr,none": 0.04067222754154717
					},
					"crows_pairs_french_sexual_orientation": {
						"alias": " - crows_pairs_french_sexual_orientation",
						"likelihood_diff,none": 3.9368131868131866,
						"likelihood_diff_stderr,none": 0.3341928800154379,
						"pct_stereotype,none": 0.7802197802197802,
						"pct_stereotype_stderr,none": 0.04364972632898533
					},
					"crows_pairs_french_socioeconomic": {
						"alias": " - crows_pairs_french_socioeconomic",
						"likelihood_diff,none": 4.232142857142857,
						"likelihood_diff_stderr,none": 0.2931956075930856,
						"pct_stereotype,none": 0.6836734693877551,
						"pct_stereotype_stderr,none": 0.03330234893102004
					},
					"freebase": {
						"alias": "freebase",
						"exact_match,none": 0.2263779527559055,
						"exact_match_stderr,none": 0.009285953859206367
					},
					"glue": {
						"acc,none": 0.5924877424778993,
						"acc_stderr,none": 0.09863420636607939,
						"alias": "glue",
						"f1,none": 0.6846939032354279,
						"f1_stderr,none": 0.00016860913785430294,
						"mcc,none": 0.08057923504646884,
						"mcc_stderr,none": 0.0009909165844571503
					},
					"gsm8k": {
						"alias": "gsm8k",
						"exact_match,get-answer": 0.028051554207733132,
						"exact_match_stderr,get-answer": 0.004548229533836327
					},
					"hellaswag": {
						"acc,none": 0.5242979486158136,
						"acc_norm,none": 0.7093208524198367,
						"acc_norm_stderr,none": 0.004531477407589657,
						"acc_stderr,none": 0.004983886091690517,
						"alias": "hellaswag"
					},
					"kmmlu": {
						"acc,none": 0.13517181634421022,
						"acc_norm,none": 0.13517181634421022,
						"acc_norm_stderr,none": 0.053389581332072555,
						"acc_stderr,none": 0.053389581332072555,
						"alias": "kmmlu"
					},
					"kmmlu_accounting": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.0416333199893227,
						"acc_stderr,none": 0.0416333199893227,
						"alias": " - kmmlu_accounting"
					},
					"kmmlu_agricultural_sciences": {
						"acc,none": 0.128,
						"acc_norm,none": 0.128,
						"acc_norm_stderr,none": 0.010570133761108666,
						"acc_stderr,none": 0.010570133761108666,
						"alias": " - kmmlu_agricultural_sciences"
					},
					"kmmlu_aviation_engineering_and_maintenance": {
						"acc,none": 0.123,
						"acc_norm,none": 0.123,
						"acc_norm_stderr,none": 0.010391293421849877,
						"acc_stderr,none": 0.010391293421849877,
						"alias": " - kmmlu_aviation_engineering_and_maintenance"
					},
					"kmmlu_biology": {
						"acc,none": 0.219,
						"acc_norm,none": 0.219,
						"acc_norm_stderr,none": 0.013084731950262028,
						"acc_stderr,none": 0.013084731950262028,
						"alias": " - kmmlu_biology"
					},
					"kmmlu_chemical_engineering": {
						"acc,none": 0.204,
						"acc_norm,none": 0.204,
						"acc_norm_stderr,none": 0.012749374359024384,
						"acc_stderr,none": 0.012749374359024384,
						"alias": " - kmmlu_chemical_engineering"
					},
					"kmmlu_chemistry": {
						"acc,none": 0.18833333333333332,
						"acc_norm,none": 0.18833333333333332,
						"acc_norm_stderr,none": 0.015974932830731804,
						"acc_stderr,none": 0.015974932830731804,
						"alias": " - kmmlu_chemistry"
					},
					"kmmlu_civil_engineering": {
						"acc,none": 0.042,
						"acc_norm,none": 0.042,
						"acc_norm_stderr,none": 0.006346359293033834,
						"acc_stderr,none": 0.006346359293033834,
						"alias": " - kmmlu_civil_engineering"
					},
					"kmmlu_computer_science": {
						"acc,none": 0.088,
						"acc_norm,none": 0.088,
						"acc_norm_stderr,none": 0.008963053962592072,
						"acc_stderr,none": 0.008963053962592072,
						"alias": " - kmmlu_computer_science"
					},
					"kmmlu_construction": {
						"acc,none": 0.062,
						"acc_norm,none": 0.062,
						"acc_norm_stderr,none": 0.007629823996280309,
						"acc_stderr,none": 0.007629823996280309,
						"alias": " - kmmlu_construction"
					},
					"kmmlu_criminal_law": {
						"acc,none": 0.22,
						"acc_norm,none": 0.22,
						"acc_norm_stderr,none": 0.029365141882663315,
						"acc_stderr,none": 0.029365141882663315,
						"alias": " - kmmlu_criminal_law"
					},
					"kmmlu_ecology": {
						"acc,none": 0.081,
						"acc_norm,none": 0.081,
						"acc_norm_stderr,none": 0.00863212103213999,
						"acc_stderr,none": 0.00863212103213999,
						"alias": " - kmmlu_ecology"
					},
					"kmmlu_economics": {
						"acc,none": 0.2846153846153846,
						"acc_norm,none": 0.2846153846153846,
						"acc_norm_stderr,none": 0.03972867937362452,
						"acc_stderr,none": 0.03972867937362452,
						"alias": " - kmmlu_economics"
					},
					"kmmlu_education": {
						"acc,none": 0.27,
						"acc_norm,none": 0.27,
						"acc_norm_stderr,none": 0.0446196043338474,
						"acc_stderr,none": 0.0446196043338474,
						"alias": " - kmmlu_education"
					},
					"kmmlu_electrical_engineering": {
						"acc,none": 0.091,
						"acc_norm,none": 0.091,
						"acc_norm_stderr,none": 0.009099549538400229,
						"acc_stderr,none": 0.009099549538400229,
						"alias": " - kmmlu_electrical_engineering"
					},
					"kmmlu_electronics_engineering": {
						"acc,none": 0.069,
						"acc_norm,none": 0.069,
						"acc_norm_stderr,none": 0.008018934050315169,
						"acc_stderr,none": 0.008018934050315169,
						"alias": " - kmmlu_electronics_engineering"
					},
					"kmmlu_energy_management": {
						"acc,none": 0.208,
						"acc_norm,none": 0.208,
						"acc_norm_stderr,none": 0.012841374572096918,
						"acc_stderr,none": 0.012841374572096918,
						"alias": " - kmmlu_energy_management"
					},
					"kmmlu_environmental_science": {
						"acc,none": 0.06,
						"acc_norm,none": 0.06,
						"acc_norm_stderr,none": 0.007513751157474929,
						"acc_stderr,none": 0.007513751157474929,
						"alias": " - kmmlu_environmental_science"
					},
					"kmmlu_fashion": {
						"acc,none": 0.186,
						"acc_norm,none": 0.186,
						"acc_norm_stderr,none": 0.012310790208412796,
						"acc_stderr,none": 0.012310790208412796,
						"alias": " - kmmlu_fashion"
					},
					"kmmlu_food_processing": {
						"acc,none": 0.151,
						"acc_norm,none": 0.151,
						"acc_norm_stderr,none": 0.011328165223341678,
						"acc_stderr,none": 0.011328165223341678,
						"alias": " - kmmlu_food_processing"
					},
					"kmmlu_gas_technology_and_engineering": {
						"acc,none": 0.129,
						"acc_norm,none": 0.129,
						"acc_norm_stderr,none": 0.010605256784796572,
						"acc_stderr,none": 0.010605256784796572,
						"alias": " - kmmlu_gas_technology_and_engineering"
					},
					"kmmlu_geomatics": {
						"acc,none": 0.129,
						"acc_norm,none": 0.129,
						"acc_norm_stderr,none": 0.010605256784796579,
						"acc_stderr,none": 0.010605256784796579,
						"alias": " - kmmlu_geomatics"
					},
					"kmmlu_health": {
						"acc,none": 0.3,
						"acc_norm,none": 0.3,
						"acc_norm_stderr,none": 0.046056618647183814,
						"acc_stderr,none": 0.046056618647183814,
						"alias": " - kmmlu_health"
					},
					"kmmlu_industrial_engineer": {
						"acc,none": 0.061,
						"acc_norm,none": 0.061,
						"acc_norm_stderr,none": 0.007572076091557424,
						"acc_stderr,none": 0.007572076091557424,
						"alias": " - kmmlu_industrial_engineer"
					},
					"kmmlu_information_technology": {
						"acc,none": 0.079,
						"acc_norm,none": 0.079,
						"acc_norm_stderr,none": 0.008534156773333449,
						"acc_stderr,none": 0.008534156773333449,
						"alias": " - kmmlu_information_technology"
					},
					"kmmlu_interior_architecture_and_design": {
						"acc,none": 0.107,
						"acc_norm,none": 0.107,
						"acc_norm_stderr,none": 0.009779910359847169,
						"acc_stderr,none": 0.009779910359847169,
						"alias": " - kmmlu_interior_architecture_and_design"
					},
					"kmmlu_law": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247114,
						"acc_stderr,none": 0.013414729030247114,
						"alias": " - kmmlu_law"
					},
					"kmmlu_machine_design_and_manufacturing": {
						"acc,none": 0.105,
						"acc_norm,none": 0.105,
						"acc_norm_stderr,none": 0.009698921026024945,
						"acc_stderr,none": 0.009698921026024945,
						"alias": " - kmmlu_machine_design_and_manufacturing"
					},
					"kmmlu_management": {
						"acc,none": 0.198,
						"acc_norm,none": 0.198,
						"acc_norm_stderr,none": 0.012607733934175303,
						"acc_stderr,none": 0.012607733934175303,
						"alias": " - kmmlu_management"
					},
					"kmmlu_maritime_engineering": {
						"acc,none": 0.175,
						"acc_norm,none": 0.175,
						"acc_norm_stderr,none": 0.01552503498177411,
						"acc_stderr,none": 0.01552503498177411,
						"alias": " - kmmlu_maritime_engineering"
					},
					"kmmlu_marketing": {
						"acc,none": 0.132,
						"acc_norm,none": 0.132,
						"acc_norm_stderr,none": 0.010709373963528024,
						"acc_stderr,none": 0.010709373963528024,
						"alias": " - kmmlu_marketing"
					},
					"kmmlu_materials_engineering": {
						"acc,none": 0.122,
						"acc_norm,none": 0.122,
						"acc_norm_stderr,none": 0.010354864712936722,
						"acc_stderr,none": 0.010354864712936722,
						"alias": " - kmmlu_materials_engineering"
					},
					"kmmlu_mechanical_engineering": {
						"acc,none": 0.109,
						"acc_norm,none": 0.109,
						"acc_norm_stderr,none": 0.00985982840703719,
						"acc_stderr,none": 0.00985982840703719,
						"alias": " - kmmlu_mechanical_engineering"
					},
					"kmmlu_nondestructive_testing": {
						"acc,none": 0.126,
						"acc_norm,none": 0.126,
						"acc_norm_stderr,none": 0.010499249222408054,
						"acc_stderr,none": 0.010499249222408054,
						"alias": " - kmmlu_nondestructive_testing"
					},
					"kmmlu_patent": {
						"acc,none": 0.25,
						"acc_norm,none": 0.25,
						"acc_norm_stderr,none": 0.04351941398892446,
						"acc_stderr,none": 0.04351941398892446,
						"alias": " - kmmlu_patent"
					},
					"kmmlu_political_science_and_sociology": {
						"acc,none": 0.23,
						"acc_norm,none": 0.23,
						"acc_norm_stderr,none": 0.024337372337779075,
						"acc_stderr,none": 0.024337372337779075,
						"alias": " - kmmlu_political_science_and_sociology"
					},
					"kmmlu_psychology": {
						"acc,none": 0.235,
						"acc_norm,none": 0.235,
						"acc_norm_stderr,none": 0.013414729030247114,
						"acc_stderr,none": 0.013414729030247114,
						"alias": " - kmmlu_psychology"
					},
					"kmmlu_public_safety": {
						"acc,none": 0.08,
						"acc_norm,none": 0.08,
						"acc_norm_stderr,none": 0.008583336977753653,
						"acc_stderr,none": 0.008583336977753653,
						"alias": " - kmmlu_public_safety"
					},
					"kmmlu_railway_and_automotive_engineering": {
						"acc,none": 0.142,
						"acc_norm,none": 0.142,
						"acc_norm_stderr,none": 0.01104345769937821,
						"acc_stderr,none": 0.01104345769937821,
						"alias": " - kmmlu_railway_and_automotive_engineering"
					},
					"kmmlu_real_estate": {
						"acc,none": 0.19,
						"acc_norm,none": 0.19,
						"acc_norm_stderr,none": 0.02780947382046009,
						"acc_stderr,none": 0.02780947382046009,
						"alias": " - kmmlu_real_estate"
					},
					"kmmlu_refrigerating_machinery": {
						"acc,none": 0.153,
						"acc_norm,none": 0.153,
						"acc_norm_stderr,none": 0.011389500459665542,
						"acc_stderr,none": 0.011389500459665542,
						"alias": " - kmmlu_refrigerating_machinery"
					},
					"kmmlu_social_welfare": {
						"acc,none": 0.188,
						"acc_norm,none": 0.188,
						"acc_norm_stderr,none": 0.012361586015103777,
						"acc_stderr,none": 0.012361586015103777,
						"alias": " - kmmlu_social_welfare"
					},
					"kmmlu_taxation": {
						"acc,none": 0.2,
						"acc_norm,none": 0.2,
						"acc_norm_stderr,none": 0.028355248200333392,
						"acc_stderr,none": 0.028355248200333392,
						"alias": " - kmmlu_taxation"
					},
					"kmmlu_telecommunications_and_wireless_technology": {
						"acc,none": 0.089,
						"acc_norm,none": 0.089,
						"acc_norm_stderr,none": 0.009008893392651528,
						"acc_stderr,none": 0.009008893392651528,
						"alias": " - kmmlu_telecommunications_and_wireless_technology"
					},
					"kobest": {
						"acc,none": 0.4792808594606446,
						"acc_norm,none": 0.446,
						"acc_norm_stderr,none": 0.0004951583166332648,
						"acc_stderr,none": 0.039158340172767106,
						"alias": "kobest",
						"f1,none": 0.3822009085469602,
						"f1_stderr,none": "N/A"
					},
					"kobest_boolq": {
						"acc,none": 0.5021367521367521,
						"acc_stderr,none": 0.013348645604701193,
						"alias": " - kobest_boolq",
						"f1,none": 0.33428165007112376,
						"f1_stderr,none": "N/A"
					},
					"kobest_copa": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.015819173374302702,
						"alias": " - kobest_copa",
						"f1,none": 0.5010340018275381,
						"f1_stderr,none": "N/A"
					},
					"kobest_hellaswag": {
						"acc,none": 0.344,
						"acc_norm,none": 0.446,
						"acc_norm_stderr,none": 0.022252153078595897,
						"acc_stderr,none": 0.02126575803797874,
						"alias": " - kobest_hellaswag",
						"f1,none": 0.33978657557310304,
						"f1_stderr,none": "N/A"
					},
					"kobest_sentineg": {
						"acc,none": 0.4836272040302267,
						"acc_stderr,none": 0.02511247082204795,
						"alias": " - kobest_sentineg",
						"f1,none": 0.47778241201193494,
						"f1_stderr,none": "N/A"
					},
					"kobest_wic": {
						"acc,none": 0.4880952380952381,
						"acc_stderr,none": 0.014087502464604053,
						"alias": " - kobest_wic",
						"f1,none": 0.328,
						"f1_stderr,none": "N/A"
					},
					"lambada": {
						"acc,none": 0.6535028138948186,
						"acc_stderr,none": 0.015592601744389837,
						"alias": "lambada",
						"perplexity,none": 4.704267936135776,
						"perplexity_stderr,none": 0.26882463857509087
					},
					"lambada_cloze": {
						"acc,none": 0.08247622744032602,
						"acc_stderr,none": 0.029639166875354016,
						"alias": "lambada_cloze",
						"perplexity,none": 228.08772301889098,
						"perplexity_stderr,none": 46.99911355168256
					},
					"lambada_multilingual": {
						"acc,none": 0.40919852513099164,
						"acc_stderr,none": 0.07881626118050682,
						"alias": "lambada_multilingual",
						"perplexity,none": 68.0827334066118,
						"perplexity_stderr,none": 20.359653449385455
					},
					"lambada_openai": {
						"acc,none": 0.6815447312245294,
						"acc_stderr,none": 0.006490579511276158,
						"alias": " - lambada_openai",
						"perplexity,none": 4.206237682827478,
						"perplexity_stderr,none": 0.0927437074622225
					},
					"lambada_openai_cloze_yaml": {
						"acc,none": 0.023675528818164177,
						"acc_stderr,none": 0.002118161179991251,
						"alias": " - lambada_openai_cloze_yaml",
						"perplexity,none": 320.4035456752187,
						"perplexity_stderr,none": 11.743879589576887
					},
					"lambada_openai_mt_de": {
						"acc,none": 0.30021346788278674,
						"acc_stderr,none": 0.00638572112715347,
						"alias": " - lambada_openai_mt_de",
						"perplexity,none": 98.34496221277986,
						"perplexity_stderr,none": 5.837072596255183
					},
					"lambada_openai_mt_en": {
						"acc,none": 0.6823209780710265,
						"acc_stderr,none": 0.0064863548390796414,
						"alias": " - lambada_openai_mt_en",
						"perplexity,none": 4.21025292817063,
						"perplexity_stderr,none": 0.09293602081447402
					},
					"lambada_openai_mt_es": {
						"acc,none": 0.29400349311080926,
						"acc_stderr,none": 0.006347308312684625,
						"alias": " - lambada_openai_mt_es",
						"perplexity,none": 105.45085810801216,
						"perplexity_stderr,none": 5.872520825310151
					},
					"lambada_openai_mt_fr": {
						"acc,none": 0.4036483601785368,
						"acc_stderr,none": 0.006835414856071572,
						"alias": " - lambada_openai_mt_fr",
						"perplexity,none": 56.27992878126166,
						"perplexity_stderr,none": 3.2288900877011724
					},
					"lambada_openai_mt_it": {
						"acc,none": 0.36580632641179894,
						"acc_stderr,none": 0.006710403442216897,
						"alias": " - lambada_openai_mt_it",
						"perplexity,none": 76.12766500283462,
						"perplexity_stderr,none": 4.513317488958775
					},
					"lambada_standard": {
						"acc,none": 0.6252668348534834,
						"acc_stderr,none": 0.006743817908692021,
						"alias": " - lambada_standard",
						"perplexity,none": 5.196449969983576,
						"perplexity_stderr,none": 0.12144160380391845
					},
					"lambada_standard_cloze_yaml": {
						"acc,none": 0.14127692606248787,
						"acc_stderr,none": 0.004852597359208049,
						"alias": " - lambada_standard_cloze_yaml",
						"perplexity,none": 135.77190036256331,
						"perplexity_stderr,none": 4.290775749076426
					},
					"logieval": {
						"alias": "logieval",
						"exact_match,get-answer": 0.30470737913486007,
						"exact_match_stderr,get-answer": 0.011612806870393318
					},
					"logiqa": {
						"acc,none": 0.2304147465437788,
						"acc_norm,none": 0.2903225806451613,
						"acc_norm_stderr,none": 0.017803862148538022,
						"acc_stderr,none": 0.016516834820590968,
						"alias": " - logiqa"
					},
					"logiqa2": {
						"acc,none": 0.24681933842239187,
						"acc_norm,none": 0.2767175572519084,
						"acc_norm_stderr,none": 0.011287148180222285,
						"acc_stderr,none": 0.010878050728561944,
						"alias": "logiqa2"
					},
					"mathqa": {
						"acc,none": 0.2797319932998325,
						"acc_norm,none": 0.2777219430485762,
						"acc_norm_stderr,none": 0.008198943594859153,
						"acc_stderr,none": 0.008217102848977552,
						"alias": "mathqa"
					},
					"mc_taco": {
						"acc,none": 0.3935606862952764,
						"acc_stderr,none": 0.005027945130125329,
						"alias": "mc_taco",
						"f1,none": 0.42785771382893684,
						"f1_stderr,none": 0.006202728172309808
					},
					"medmcqa": {
						"acc,none": 0.26870666985417163,
						"acc_norm,none": 0.26870666985417163,
						"acc_norm_stderr,none": 0.006854772019061031,
						"acc_stderr,none": 0.006854772019061031,
						"alias": "medmcqa"
					},
					"medqa_4options": {
						"acc,none": 0.26394344069128045,
						"acc_norm,none": 0.26394344069128045,
						"acc_norm_stderr,none": 0.012358548743674928,
						"acc_stderr,none": 0.012358548743674928,
						"alias": "medqa_4options"
					},
					"mmlu": {
						"acc,none": 0.32317333713146273,
						"acc_stderr,none": 0.05897347516172344,
						"alias": "mmlu"
					},
					"mmlu_abstract_algebra": {
						"acc,none": 0.29,
						"acc_stderr,none": 0.045604802157206845,
						"alias": "  - abstract_algebra"
					},
					"mmlu_anatomy": {
						"acc,none": 0.34074074074074073,
						"acc_stderr,none": 0.04094376269996793,
						"alias": "  - anatomy"
					},
					"mmlu_astronomy": {
						"acc,none": 0.34868421052631576,
						"acc_stderr,none": 0.0387813988879761,
						"alias": "  - astronomy"
					},
					"mmlu_business_ethics": {
						"acc,none": 0.38,
						"acc_stderr,none": 0.04878317312145633,
						"alias": "  - business_ethics"
					},
					"mmlu_clinical_knowledge": {
						"acc,none": 0.3471698113207547,
						"acc_stderr,none": 0.029300101705549652,
						"alias": "  - clinical_knowledge"
					},
					"mmlu_college_biology": {
						"acc,none": 0.2569444444444444,
						"acc_stderr,none": 0.03653946969442099,
						"alias": "  - college_biology"
					},
					"mmlu_college_chemistry": {
						"acc,none": 0.19,
						"acc_stderr,none": 0.039427724440366234,
						"alias": "  - college_chemistry"
					},
					"mmlu_college_computer_science": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - college_computer_science"
					},
					"mmlu_college_mathematics": {
						"acc,none": 0.35,
						"acc_stderr,none": 0.047937248544110196,
						"alias": "  - college_mathematics"
					},
					"mmlu_college_medicine": {
						"acc,none": 0.32947976878612717,
						"acc_stderr,none": 0.03583901754736412,
						"alias": "  - college_medicine"
					},
					"mmlu_college_physics": {
						"acc,none": 0.2647058823529412,
						"acc_stderr,none": 0.04389869956808778,
						"alias": "  - college_physics"
					},
					"mmlu_computer_security": {
						"acc,none": 0.33,
						"acc_stderr,none": 0.047258156262526045,
						"alias": "  - computer_security"
					},
					"mmlu_conceptual_physics": {
						"acc,none": 0.33191489361702126,
						"acc_stderr,none": 0.030783736757745653,
						"alias": "  - conceptual_physics"
					},
					"mmlu_econometrics": {
						"acc,none": 0.2543859649122807,
						"acc_stderr,none": 0.040969851398436716,
						"alias": "  - econometrics"
					},
					"mmlu_electrical_engineering": {
						"acc,none": 0.2896551724137931,
						"acc_stderr,none": 0.03780019230438014,
						"alias": "  - electrical_engineering"
					},
					"mmlu_elementary_mathematics": {
						"acc,none": 0.2566137566137566,
						"acc_stderr,none": 0.022494510767503154,
						"alias": "  - elementary_mathematics"
					},
					"mmlu_formal_logic": {
						"acc,none": 0.2619047619047619,
						"acc_stderr,none": 0.03932537680392871,
						"alias": "  - formal_logic"
					},
					"mmlu_global_facts": {
						"acc,none": 0.25,
						"acc_stderr,none": 0.04351941398892446,
						"alias": "  - global_facts"
					},
					"mmlu_high_school_biology": {
						"acc,none": 0.3580645161290323,
						"acc_stderr,none": 0.02727389059430065,
						"alias": "  - high_school_biology"
					},
					"mmlu_high_school_chemistry": {
						"acc,none": 0.2561576354679803,
						"acc_stderr,none": 0.030712730070982592,
						"alias": "  - high_school_chemistry"
					},
					"mmlu_high_school_computer_science": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.046482319871173156,
						"alias": "  - high_school_computer_science"
					},
					"mmlu_high_school_european_history": {
						"acc,none": 0.2727272727272727,
						"acc_stderr,none": 0.03477691162163659,
						"alias": "  - high_school_european_history"
					},
					"mmlu_high_school_geography": {
						"acc,none": 0.3787878787878788,
						"acc_stderr,none": 0.03456088731993747,
						"alias": "  - high_school_geography"
					},
					"mmlu_high_school_government_and_politics": {
						"acc,none": 0.38341968911917096,
						"acc_stderr,none": 0.03508984236295341,
						"alias": "  - high_school_government_and_politics"
					},
					"mmlu_high_school_macroeconomics": {
						"acc,none": 0.2923076923076923,
						"acc_stderr,none": 0.02306043838085772,
						"alias": "  - high_school_macroeconomics"
					},
					"mmlu_high_school_mathematics": {
						"acc,none": 0.25925925925925924,
						"acc_stderr,none": 0.026719240783712163,
						"alias": "  - high_school_mathematics"
					},
					"mmlu_high_school_microeconomics": {
						"acc,none": 0.3403361344537815,
						"acc_stderr,none": 0.030778057422931673,
						"alias": "  - high_school_microeconomics"
					},
					"mmlu_high_school_physics": {
						"acc,none": 0.25165562913907286,
						"acc_stderr,none": 0.035433042343899844,
						"alias": "  - high_school_physics"
					},
					"mmlu_high_school_psychology": {
						"acc,none": 0.3651376146788991,
						"acc_stderr,none": 0.02064280145438401,
						"alias": "  - high_school_psychology"
					},
					"mmlu_high_school_statistics": {
						"acc,none": 0.2175925925925926,
						"acc_stderr,none": 0.02813968944485966,
						"alias": "  - high_school_statistics"
					},
					"mmlu_high_school_us_history": {
						"acc,none": 0.3627450980392157,
						"acc_stderr,none": 0.03374499356319355,
						"alias": "  - high_school_us_history"
					},
					"mmlu_high_school_world_history": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.0306858205966108,
						"alias": "  - high_school_world_history"
					},
					"mmlu_human_aging": {
						"acc,none": 0.3811659192825112,
						"acc_stderr,none": 0.032596251184168264,
						"alias": "  - human_aging"
					},
					"mmlu_human_sexuality": {
						"acc,none": 0.24427480916030533,
						"acc_stderr,none": 0.037683359597287434,
						"alias": "  - human_sexuality"
					},
					"mmlu_humanities": {
						"acc,none": 0.3177470775770457,
						"acc_stderr,none": 0.05524014998901674,
						"alias": " - humanities"
					},
					"mmlu_international_law": {
						"acc,none": 0.45454545454545453,
						"acc_stderr,none": 0.045454545454545456,
						"alias": "  - international_law"
					},
					"mmlu_jurisprudence": {
						"acc,none": 0.3888888888888889,
						"acc_stderr,none": 0.0471282125742677,
						"alias": "  - jurisprudence"
					},
					"mmlu_logical_fallacies": {
						"acc,none": 0.3558282208588957,
						"acc_stderr,none": 0.03761521380046735,
						"alias": "  - logical_fallacies"
					},
					"mmlu_machine_learning": {
						"acc,none": 0.29464285714285715,
						"acc_stderr,none": 0.04327040932578728,
						"alias": "  - machine_learning"
					},
					"mmlu_management": {
						"acc,none": 0.30097087378640774,
						"acc_stderr,none": 0.045416094465039455,
						"alias": "  - management"
					},
					"mmlu_marketing": {
						"acc,none": 0.4700854700854701,
						"acc_stderr,none": 0.03269741106812443,
						"alias": "  - marketing"
					},
					"mmlu_medical_genetics": {
						"acc,none": 0.31,
						"acc_stderr,none": 0.04648231987117316,
						"alias": "  - medical_genetics"
					},
					"mmlu_miscellaneous": {
						"acc,none": 0.4674329501915709,
						"acc_stderr,none": 0.017841995750520867,
						"alias": "  - miscellaneous"
					},
					"mmlu_moral_disputes": {
						"acc,none": 0.3554913294797688,
						"acc_stderr,none": 0.025770292082977243,
						"alias": "  - moral_disputes"
					},
					"mmlu_moral_scenarios": {
						"acc,none": 0.2424581005586592,
						"acc_stderr,none": 0.014333522059217892,
						"alias": "  - moral_scenarios"
					},
					"mmlu_nutrition": {
						"acc,none": 0.35294117647058826,
						"acc_stderr,none": 0.027363593284684934,
						"alias": "  - nutrition"
					},
					"mmlu_other": {
						"acc,none": 0.3614419053749597,
						"acc_stderr,none": 0.06498932394179731,
						"alias": " - other"
					},
					"mmlu_philosophy": {
						"acc,none": 0.3729903536977492,
						"acc_stderr,none": 0.027466610213140105,
						"alias": "  - philosophy"
					},
					"mmlu_prehistory": {
						"acc,none": 0.3734567901234568,
						"acc_stderr,none": 0.026915003011380154,
						"alias": "  - prehistory"
					},
					"mmlu_professional_accounting": {
						"acc,none": 0.25886524822695034,
						"acc_stderr,none": 0.026129572527180848,
						"alias": "  - professional_accounting"
					},
					"mmlu_professional_law": {
						"acc,none": 0.2953063885267275,
						"acc_stderr,none": 0.011651061936208813,
						"alias": "  - professional_law"
					},
					"mmlu_professional_medicine": {
						"acc,none": 0.20588235294117646,
						"acc_stderr,none": 0.024562204314142314,
						"alias": "  - professional_medicine"
					},
					"mmlu_professional_psychology": {
						"acc,none": 0.30718954248366015,
						"acc_stderr,none": 0.018663359671463663,
						"alias": "  - professional_psychology"
					},
					"mmlu_public_relations": {
						"acc,none": 0.4,
						"acc_stderr,none": 0.0469237132203465,
						"alias": "  - public_relations"
					},
					"mmlu_security_studies": {
						"acc,none": 0.2530612244897959,
						"acc_stderr,none": 0.02783302387139968,
						"alias": "  - security_studies"
					},
					"mmlu_social_sciences": {
						"acc,none": 0.32791680207994806,
						"acc_stderr,none": 0.04868272895247769,
						"alias": " - social_sciences"
					},
					"mmlu_sociology": {
						"acc,none": 0.38308457711442784,
						"acc_stderr,none": 0.034375193373382504,
						"alias": "  - sociology"
					},
					"mmlu_stem": {
						"acc,none": 0.28893117665715184,
						"acc_stderr,none": 0.05445770965513818,
						"alias": " - stem"
					},
					"mmlu_us_foreign_policy": {
						"acc,none": 0.34,
						"acc_stderr,none": 0.047609522856952365,
						"alias": "  - us_foreign_policy"
					},
					"mmlu_virology": {
						"acc,none": 0.3072289156626506,
						"acc_stderr,none": 0.03591566797824664,
						"alias": "  - virology"
					},
					"mmlu_world_religions": {
						"acc,none": 0.4619883040935672,
						"acc_stderr,none": 0.03823727092882307,
						"alias": "  - world_religions"
					},
					"mnli": {
						"acc,none": 0.4152827305145186,
						"acc_stderr,none": 0.004974184264202521,
						"alias": "mnli"
					},
					"mnli_mismatch": {
						"acc,none": 0.4159886086248983,
						"acc_stderr,none": 0.0049711003495917045,
						"alias": "mnli_mismatch"
					},
					"mrpc": {
						"acc,none": 0.7034313725490197,
						"acc_stderr,none": 0.022639991831486735,
						"alias": "mrpc",
						"f1,none": 0.8180451127819549,
						"f1_stderr,none": 0.016228486872067776
					},
					"multimedqa": {
						"acc,none": 0.2936834634492548,
						"acc_norm,none": 0.2683186135984066,
						"acc_norm_stderr,none": 8.600489523348352e-05,
						"acc_stderr,none": 0.07174932081788213,
						"alias": "stem"
					},
					"multirc": {
						"acc,none": 0.5719884488448845,
						"acc_stderr,none": 0.007106976252751528,
						"alias": "multirc"
					},
					"mutual": {
						"alias": "mutual",
						"mrr,none": 0.7088976690112603,
						"mrr_stderr,none": 0.010286509606069513,
						"r@1,none": 0.22573363431151242,
						"r@1_stderr,none": 0.014053085820407473,
						"r@2,none": 0.4108352144469526,
						"r@2_stderr,none": 0.016537908550616855
					},
					"mutual_plus": {
						"alias": "mutual_plus",
						"mrr,none": 0.5962189635762243,
						"mrr_stderr,none": 0.00962849248063653,
						"r@1,none": 0.2595936794582393,
						"r@1_stderr,none": 0.014737047402750952,
						"r@2,none": 0.5395033860045146,
						"r@2_stderr,none": 0.016754777798868803
					},
					"openbookqa": {
						"acc,none": 0.288,
						"acc_norm,none": 0.406,
						"acc_norm_stderr,none": 0.021983962090086337,
						"acc_stderr,none": 0.02027150383507522,
						"alias": "openbookqa"
					},
					"paws_de": {
						"acc,none": 0.4145,
						"acc_stderr,none": 0.011018419931591758,
						"alias": " - paws_de"
					},
					"paws_en": {
						"acc,none": 0.417,
						"acc_stderr,none": 0.011027978425535497,
						"alias": " - paws_en"
					},
					"paws_es": {
						"acc,none": 0.396,
						"acc_stderr,none": 0.010938547705840854,
						"alias": " - paws_es"
					},
					"paws_fr": {
						"acc,none": 0.5375,
						"acc_stderr,none": 0.011151639095992292,
						"alias": " - paws_fr"
					},
					"paws_ja": {
						"acc,none": 0.525,
						"acc_stderr,none": 0.011169148353274967,
						"alias": " - paws_ja"
					},
					"paws_ko": {
						"acc,none": 0.4975,
						"acc_stderr,none": 0.011182996230990776,
						"alias": " - paws_ko"
					},
					"paws_zh": {
						"acc,none": 0.5235,
						"acc_stderr,none": 0.011170777418517836,
						"alias": " - paws_zh"
					},
					"pawsx": {
						"acc,none": 0.473,
						"acc_stderr,none": 0.04369243553504766,
						"alias": "pawsx"
					},
					"piqa": {
						"acc,none": 0.7589771490750816,
						"acc_norm,none": 0.7682263329706203,
						"acc_norm_stderr,none": 0.00984514377279403,
						"acc_stderr,none": 0.009979042717267312,
						"alias": " - piqa"
					},
					"prost": {
						"acc,none": 0.30855038428693426,
						"acc_norm,none": 0.3111656703672075,
						"acc_norm_stderr,none": 0.003382411025820201,
						"acc_stderr,none": 0.0033745546584643114,
						"alias": "prost"
					},
					"pubmedqa": {
						"acc,none": 0.558,
						"acc_stderr,none": 0.02223197069632112,
						"alias": "pubmedqa"
					},
					"pythia": {
						"acc,none": 0.7211627332042874,
						"acc_norm,none": 0.6209434214661425,
						"acc_norm_stderr,none": 0.01260100101361836,
						"acc_stderr,none": 0.1559323372737713,
						"alias": "pythia",
						"bits_per_byte,none": 0.6254666222299659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.542709715488212,
						"byte_perplexity_stderr,none": "N/A",
						"perplexity,none": 4.206237682827478,
						"perplexity_stderr,none": 0.0927437074622225,
						"word_perplexity,none": 10.158756334994626,
						"word_perplexity_stderr,none": "N/A"
					},
					"qa4mre": {
						"acc,none": 0.41134751773049644,
						"acc_norm,none": 0.45567375886524825,
						"acc_norm_stderr,none": 0.05320843427753255,
						"acc_stderr,none": 0.039262787617228866,
						"alias": "qa4mre"
					},
					"qa4mre_2011": {
						"acc,none": 0.4666666666666667,
						"acc_norm,none": 0.5916666666666667,
						"acc_norm_stderr,none": 0.045058059858031296,
						"acc_stderr,none": 0.0457329560380023,
						"alias": " - qa4mre_2011"
					},
					"qa4mre_2012": {
						"acc,none": 0.4,
						"acc_norm,none": 0.45,
						"acc_norm_stderr,none": 0.03945381823835187,
						"acc_stderr,none": 0.03885143449429054,
						"alias": " - qa4mre_2012"
					},
					"qa4mre_2013": {
						"acc,none": 0.39436619718309857,
						"acc_norm,none": 0.4014084507042254,
						"acc_norm_stderr,none": 0.029138375022747656,
						"acc_stderr,none": 0.029051039507650152,
						"alias": " - qa4mre_2013"
					},
					"qnli": {
						"acc,none": 0.5632436390261761,
						"acc_stderr,none": 0.006711072327410219,
						"alias": "qnli"
					},
					"qqp": {
						"acc,none": 0.6767004699480583,
						"acc_stderr,none": 0.0023262386975602743,
						"alias": "qqp",
						"f1,none": 0.6837176664166283,
						"f1_stderr,none": 0.0026298961392616236
					},
					"race": {
						"acc,none": 0.3464114832535885,
						"acc_stderr,none": 0.014726451021782803,
						"alias": "race"
					},
					"rte": {
						"acc,none": 0.592057761732852,
						"acc_stderr,none": 0.029581952519606197,
						"alias": "rte"
					},
					"sciq": {
						"acc,none": 0.956,
						"acc_norm,none": 0.956,
						"acc_norm_stderr,none": 0.006488921798427419,
						"acc_stderr,none": 0.006488921798427419,
						"alias": " - sciq"
					},
					"sglue_rte": {
						"acc,none": 0.592057761732852,
						"acc_stderr,none": 0.029581952519606197,
						"alias": "sglue_rte"
					},
					"sst2": {
						"acc,none": 0.8463302752293578,
						"acc_stderr,none": 0.012219544510178476,
						"alias": "sst2"
					},
					"swag": {
						"acc,none": 0.5521343596920923,
						"acc_norm,none": 0.7505748275517344,
						"acc_norm_stderr,none": 0.003059129487544763,
						"acc_stderr,none": 0.003515822830353066,
						"alias": "swag"
					},
					"sycophancy": {
						"acc,none": 0.5383514691690793,
						"acc_stderr,none": 0.025050820977546028,
						"alias": "sycophancy"
					},
					"sycophancy_on_nlp_survey": {
						"acc,none": 0.5105168269230769,
						"acc_stderr,none": 0.0050031483261585834,
						"alias": " - sycophancy_on_nlp_survey"
					},
					"sycophancy_on_philpapers2020": {
						"acc,none": 0.6030201682375596,
						"acc_stderr,none": 0.004925831876678817,
						"alias": " - sycophancy_on_philpapers2020"
					},
					"sycophancy_on_political_typology_quiz": {
						"acc,none": 0.5030392156862745,
						"acc_stderr,none": 0.004950888952356967,
						"alias": " - sycophancy_on_political_typology_quiz"
					},
					"truthfulqa": {
						"acc,none": 0.3055022521837347,
						"acc_stderr,none": 0.03656239278774286,
						"alias": "truthfulqa",
						"bleu_acc,none": 0.3011015911872705,
						"bleu_acc_stderr,none": 0.00025789144972030077,
						"bleu_diff,none": -9.001934630605374,
						"bleu_diff_stderr,none": 0.609706773931244,
						"bleu_max,none": 25.803721470815315,
						"bleu_max_stderr,none": 0.5939472986331531,
						"rouge1_acc,none": 0.2766217870257038,
						"rouge1_acc_stderr,none": 0.00024522325241226745,
						"rouge1_diff,none": -11.768159276195956,
						"rouge1_diff_stderr,none": 0.6596934811127005,
						"rouge1_max,none": 51.491984755493874,
						"rouge1_max_stderr,none": 0.7115419760726746,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.00022732253882482836,
						"rouge2_diff,none": -13.766156782955843,
						"rouge2_diff_stderr,none": 0.9754279826781366,
						"rouge2_max,none": 35.032337261676204,
						"rouge2_max_stderr,none": 0.9710174456585365,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.00024044238490645661,
						"rougeL_diff,none": -12.018707216661447,
						"rougeL_diff_stderr,none": 0.6824768950198414,
						"rougeL_max,none": 48.40620987330686,
						"rougeL_max_stderr,none": 0.7391060437300081
					},
					"truthfulqa_gen": {
						"alias": " - truthfulqa_gen",
						"bleu_acc,none": 0.3011015911872705,
						"bleu_acc_stderr,none": 0.016058999026100623,
						"bleu_diff,none": -9.001934630605374,
						"bleu_diff_stderr,none": 0.7808372262714195,
						"bleu_max,none": 25.803721470815315,
						"bleu_max_stderr,none": 0.7706797639961446,
						"rouge1_acc,none": 0.2766217870257038,
						"rouge1_acc_stderr,none": 0.015659605755326902,
						"rouge1_diff,none": -11.768159276195956,
						"rouge1_diff_stderr,none": 0.8122151692210018,
						"rouge1_max,none": 51.491984755493874,
						"rouge1_max_stderr,none": 0.8435294755209651,
						"rouge2_acc,none": 0.2460220318237454,
						"rouge2_acc_stderr,none": 0.01507721920066258,
						"rouge2_diff,none": -13.766156782955843,
						"rouge2_diff_stderr,none": 0.9876375765826939,
						"rouge2_max,none": 35.032337261676204,
						"rouge2_max_stderr,none": 0.9854021745757092,
						"rougeL_acc,none": 0.26805385556915545,
						"rougeL_acc_stderr,none": 0.015506204722834553,
						"rougeL_diff,none": -12.018707216661447,
						"rougeL_diff_stderr,none": 0.8261215982044299,
						"rougeL_max,none": 48.40620987330686,
						"rougeL_max_stderr,none": 0.8597127681557417
					},
					"truthfulqa_mc1": {
						"acc,none": 0.23378212974296206,
						"acc_stderr,none": 0.014816195991931591,
						"alias": " - truthfulqa_mc1"
					},
					"truthfulqa_mc2": {
						"acc,none": 0.3413623134041211,
						"acc_stderr,none": 0.01342712142852591,
						"alias": " - truthfulqa_mc2"
					},
					"webqs": {
						"alias": "webqs",
						"exact_match,none": 0.2263779527559055,
						"exact_match_stderr,none": 0.009285953859206367
					},
					"wic": {
						"acc,none": 0.493730407523511,
						"acc_stderr,none": 0.019809163801196517,
						"alias": "wic"
					},
					"wikitext": {
						"alias": "wikitext",
						"bits_per_byte,none": 0.6254666222299659,
						"bits_per_byte_stderr,none": "N/A",
						"byte_perplexity,none": 1.542709715488212,
						"byte_perplexity_stderr,none": "N/A",
						"word_perplexity,none": 10.158756334994626,
						"word_perplexity_stderr,none": "N/A"
					},
					"winogrande": {
						"acc,none": 0.654301499605367,
						"acc_stderr,none": 0.013366596951934375,
						"alias": " - winogrande"
					},
					"wnli": {
						"acc,none": 0.43661971830985913,
						"acc_stderr,none": 0.0592793555841297,
						"alias": "wnli"
					},
					"wsc": {
						"acc,none": 0.375,
						"acc_stderr,none": 0.04770204856076104,
						"alias": " - wsc"
					},
					"wsc273": {
						"acc,none": 0.7985347985347986,
						"acc_stderr,none": 0.02431993962718263,
						"alias": "wsc273"
					},
					"xcopa": {
						"acc,none": 0.5285454545454547,
						"acc_stderr,none": 0.036469924902244086,
						"alias": "xcopa"
					},
					"xcopa_et": {
						"acc,none": 0.48,
						"acc_stderr,none": 0.022365160424231333,
						"alias": " - xcopa_et"
					},
					"xcopa_ht": {
						"acc,none": 0.52,
						"acc_stderr,none": 0.022365160424231336,
						"alias": " - xcopa_ht"
					},
					"xcopa_id": {
						"acc,none": 0.548,
						"acc_stderr,none": 0.022279694107843428,
						"alias": " - xcopa_id"
					},
					"xcopa_it": {
						"acc,none": 0.606,
						"acc_stderr,none": 0.021874299301689253,
						"alias": " - xcopa_it"
					},
					"xcopa_qu": {
						"acc,none": 0.484,
						"acc_stderr,none": 0.022371610982580396,
						"alias": " - xcopa_qu"
					},
					"xcopa_sw": {
						"acc,none": 0.512,
						"acc_stderr,none": 0.02237662679792717,
						"alias": " - xcopa_sw"
					},
					"xcopa_ta": {
						"acc,none": 0.536,
						"acc_stderr,none": 0.022324981738385253,
						"alias": " - xcopa_ta"
					},
					"xcopa_th": {
						"acc,none": 0.538,
						"acc_stderr,none": 0.02231833811987053,
						"alias": " - xcopa_th"
					},
					"xcopa_tr": {
						"acc,none": 0.534,
						"acc_stderr,none": 0.02233126442325838,
						"alias": " - xcopa_tr"
					},
					"xcopa_vi": {
						"acc,none": 0.502,
						"acc_stderr,none": 0.022382894986483524,
						"alias": " - xcopa_vi"
					},
					"xcopa_zh": {
						"acc,none": 0.554,
						"acc_stderr,none": 0.022252153078595897,
						"alias": " - xcopa_zh"
					},
					"xnli": {
						"acc,none": 0.3856760374832664,
						"acc_stderr,none": 0.05028935202602867,
						"alias": "xnli"
					},
					"xnli_ar": {
						"acc,none": 0.3393574297188755,
						"acc_stderr,none": 0.009490727635646757,
						"alias": " - xnli_ar"
					},
					"xnli_bg": {
						"acc,none": 0.40200803212851405,
						"acc_stderr,none": 0.009827715873484718,
						"alias": " - xnli_bg"
					},
					"xnli_de": {
						"acc,none": 0.43453815261044176,
						"acc_stderr,none": 0.009935807354856826,
						"alias": " - xnli_de"
					},
					"xnli_el": {
						"acc,none": 0.37389558232931724,
						"acc_stderr,none": 0.009698087600721298,
						"alias": " - xnli_el"
					},
					"xnli_en": {
						"acc,none": 0.5317269076305221,
						"acc_stderr,none": 0.010001876146466693,
						"alias": " - xnli_en"
					},
					"xnli_es": {
						"acc,none": 0.41887550200803214,
						"acc_stderr,none": 0.009889278882314558,
						"alias": " - xnli_es"
					},
					"xnli_fr": {
						"acc,none": 0.45903614457831327,
						"acc_stderr,none": 0.009988381409296447,
						"alias": " - xnli_fr"
					},
					"xnli_hi": {
						"acc,none": 0.3493975903614458,
						"acc_stderr,none": 0.009556642460138147,
						"alias": " - xnli_hi"
					},
					"xnli_ru": {
						"acc,none": 0.4457831325301205,
						"acc_stderr,none": 0.009962979511168332,
						"alias": " - xnli_ru"
					},
					"xnli_sw": {
						"acc,none": 0.3457831325301205,
						"acc_stderr,none": 0.009533455033752756,
						"alias": " - xnli_sw"
					},
					"xnli_th": {
						"acc,none": 0.3333333333333333,
						"acc_stderr,none": 0.009448900914617623,
						"alias": " - xnli_th"
					},
					"xnli_tr": {
						"acc,none": 0.3405622489959839,
						"acc_stderr,none": 0.009498886690274443,
						"alias": " - xnli_tr"
					},
					"xnli_ur": {
						"acc,none": 0.3329317269076305,
						"acc_stderr,none": 0.009446051001358226,
						"alias": " - xnli_ur"
					},
					"xnli_vi": {
						"acc,none": 0.3441767068273092,
						"acc_stderr,none": 0.00952295446980603,
						"alias": " - xnli_vi"
					},
					"xnli_zh": {
						"acc,none": 0.3337349397590361,
						"acc_stderr,none": 0.009451743112667058,
						"alias": " - xnli_zh"
					},
					"xstorycloze": {
						"acc,none": 0.5594127910474701,
						"acc_stderr,none": 0.0925333616404555,
						"alias": "xstorycloze"
					},
					"xstorycloze_ar": {
						"acc,none": 0.4831237590999338,
						"acc_stderr,none": 0.012859793919977602,
						"alias": " - xstorycloze_ar"
					},
					"xstorycloze_en": {
						"acc,none": 0.8550628722700199,
						"acc_stderr,none": 0.009059419624385553,
						"alias": " - xstorycloze_en"
					},
					"xstorycloze_es": {
						"acc,none": 0.628722700198544,
						"acc_stderr,none": 0.012433411152341697,
						"alias": " - xstorycloze_es"
					},
					"xstorycloze_eu": {
						"acc,none": 0.5029781601588352,
						"acc_stderr,none": 0.012866897066011233,
						"alias": " - xstorycloze_eu"
					},
					"xstorycloze_hi": {
						"acc,none": 0.5076108537392455,
						"acc_stderr,none": 0.01286563457111448,
						"alias": " - xstorycloze_hi"
					},
					"xstorycloze_id": {
						"acc,none": 0.5049636002647253,
						"acc_stderr,none": 0.012866491277589953,
						"alias": " - xstorycloze_id"
					},
					"xstorycloze_my": {
						"acc,none": 0.4884182660489742,
						"acc_stderr,none": 0.012863672949335898,
						"alias": " - xstorycloze_my"
					},
					"xstorycloze_ru": {
						"acc,none": 0.586366644606221,
						"acc_stderr,none": 0.012673714851823765,
						"alias": " - xstorycloze_ru"
					},
					"xstorycloze_sw": {
						"acc,none": 0.5062872270019855,
						"acc_stderr,none": 0.012866108021218212,
						"alias": " - xstorycloze_sw"
					},
					"xstorycloze_te": {
						"acc,none": 0.5486432825943084,
						"acc_stderr,none": 0.012806088966122401,
						"alias": " - xstorycloze_te"
					},
					"xstorycloze_zh": {
						"acc,none": 0.5413633355393779,
						"acc_stderr,none": 0.01282302034016982,
						"alias": " - xstorycloze_zh"
					},
					"xwinograd": {
						"acc,none": 0.7412901775679928,
						"acc_stderr,none": 0.06363409522532021,
						"alias": "xwinograd"
					},
					"xwinograd_en": {
						"acc,none": 0.8550537634408603,
						"acc_stderr,none": 0.007302677492920841,
						"alias": " - xwinograd_en"
					},
					"xwinograd_fr": {
						"acc,none": 0.6506024096385542,
						"acc_stderr,none": 0.0526515135644047,
						"alias": " - xwinograd_fr"
					},
					"xwinograd_jp": {
						"acc,none": 0.5828988529718456,
						"acc_stderr,none": 0.01593068821193874,
						"alias": " - xwinograd_jp"
					},
					"xwinograd_pt": {
						"acc,none": 0.6653992395437263,
						"acc_stderr,none": 0.02915103415331038,
						"alias": " - xwinograd_pt"
					},
					"xwinograd_ru": {
						"acc,none": 0.6126984126984127,
						"acc_stderr,none": 0.02749053501130577,
						"alias": " - xwinograd_ru"
					},
					"xwinograd_zh": {
						"acc,none": 0.6527777777777778,
						"acc_stderr,none": 0.021227675707409237,
						"alias": " - xwinograd_zh"
					}
				}
			}
		},
		"name": "togethercomputer/RedPajama-INCITE-7B-Instruct"
	}
}